code_generator_arm.cc revision 424f676379f2f872acd1478672022f19f3240fc1
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
// When true, the frame entry compares SP against the thread's stack end and
// branches to a slow path; when false it relies on an implicit probe load
// below SP (see GenerateFrameEntry).
static constexpr bool kExplicitStackOverflowCheck = false;

// Number of registers pushed in the frame entry: LR plus R6 and R7.
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// Frame offset at which the current method (passed in R0) is stored on entry.
static constexpr int kCurrentMethodStackOffset = 0;

// Core registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// Runtime entry points take no floating-point register arguments.
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
// Calling convention used when calling into the runtime: arguments go in
// R0-R2, never in FPU registers.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
61
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
// Base class for ARM slow paths. Provides the labels that fast-path code
// branches to (entry) and that slow paths branch back through (exit).
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  // Label bound at the first instruction of the slow path.
  Label* GetEntryLabel() { return &entry_label_; }
  // Label the slow path branches to when it is done.
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
78
// Slow path calling the runtime to throw a NullPointerException.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Note: no branch to GetExitLabel() is emitted after the call — the
    // throw entry point does not fall back to the fast path.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  // The null check this slow path throws for (supplies the dex PC).
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
94
// Slow path for the explicit stack overflow check: tail-calls the
// pThrowStackOverflow entry point by loading its address straight into PC.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Load the entry point from the thread register directly into PC,
    // i.e. jump (not call) into the throw helper.
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};
108
// Slow path for HSuspendCheck: calls pTestSuspend, then either returns to
// the fast path (when `successor_` is null) or branches to `successor_`.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Registers live at the suspend check must survive the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when there is no explicit successor block.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
143
// Slow path throwing an array bounds exception: moves the offending index
// and the array length into the runtime argument registers and calls
// pThrowArrayBounds.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Move index and length into the first two runtime argument registers.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Where the failing index currently lives.
  const Location index_location_;
  // Where the array length currently lives.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
172
// Slow path resolving (and optionally initializing) a class through the
// runtime. Shared by HLoadClass and HClinitCheck.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Pass the type index and the current method to the runtime.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Select the entry point: resolve-and-initialize vs. resolve only.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    if (locations->Out().IsValid()) {
      // The result must not be clobbered by the register restore below.
      DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
223
// Slow path resolving a string through the runtime (pResolveString) and
// moving the result into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The result must not be clobbered by the register restore below.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Pass the current method and the string index to the runtime.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    // The resolved string is returned in R0.
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
252
// Redefine the assembler shortcut for member functions of the code
// generator classes below (it previously dispatched through `codegen`).
// Note: a second, redundant `#undef __` was removed here.
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
257
258inline Condition ARMCondition(IfCondition cond) {
259  switch (cond) {
260    case kCondEQ: return EQ;
261    case kCondNE: return NE;
262    case kCondLT: return LT;
263    case kCondLE: return LE;
264    case kCondGT: return GT;
265    case kCondGE: return GE;
266    default:
267      LOG(FATAL) << "Unknown if condition";
268  }
269  return EQ;        // Unreachable.
270}
271
272inline Condition ARMOppositeCondition(IfCondition cond) {
273  switch (cond) {
274    case kCondEQ: return NE;
275    case kCondNE: return EQ;
276    case kCondLT: return GE;
277    case kCondLE: return GT;
278    case kCondGT: return LE;
279    case kCondGE: return LT;
280    default:
281      LOG(FATAL) << "Unknown if condition";
282  }
283  return EQ;        // Unreachable.
284}
285
286void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
287  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
288}
289
290void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
291  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
292}
293
// Spills core register `reg_id` to the stack at SP + `stack_index`.
// Returns the number of bytes consumed by the spill slot.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
298
// Reloads core register `reg_id` from the stack at SP + `stack_index`.
// Returns the number of bytes consumed by the spill slot.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
303
// Wires up the per-pass helpers that all share this code generator.
// NOTE(review): assembler_(true) — confirm the flag's meaning against the
// ArmAssembler constructor.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true) {}
311
312size_t CodeGeneratorARM::FrameEntrySpillSize() const {
313  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
314}
315
// Picks a free register (or register pair) suitable for `type` and marks it
// as blocked so subsequent allocations cannot reuse it.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; block both halves individually
      // and refresh the pair bookkeeping.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting at an even index
      // (they alias a D register).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
366
367void CodeGeneratorARM::SetupBlockedRegisters() const {
368  // Don't allocate the dalvik style register pair passing.
369  blocked_register_pairs_[R1_R2] = true;
370
371  // Stack register, LR and PC are always reserved.
372  blocked_core_registers_[SP] = true;
373  blocked_core_registers_[LR] = true;
374  blocked_core_registers_[PC] = true;
375
376  // Reserve R4 for suspend check.
377  blocked_core_registers_[R4] = true;
378
379  // Reserve thread register.
380  blocked_core_registers_[TR] = true;
381
382  // Reserve temp register.
383  blocked_core_registers_[IP] = true;
384
385  // TODO: We currently don't use Quick's callee saved registers.
386  // We always save and restore R6 and R7 to make sure we can use three
387  // register pairs for long operations.
388  blocked_core_registers_[R5] = true;
389  blocked_core_registers_[R8] = true;
390  blocked_core_registers_[R10] = true;
391  blocked_core_registers_[R11] = true;
392
393  blocked_fpu_registers_[S16] = true;
394  blocked_fpu_registers_[S17] = true;
395  blocked_fpu_registers_[S18] = true;
396  blocked_fpu_registers_[S19] = true;
397  blocked_fpu_registers_[S20] = true;
398  blocked_fpu_registers_[S21] = true;
399  blocked_fpu_registers_[S22] = true;
400  blocked_fpu_registers_[S23] = true;
401  blocked_fpu_registers_[S24] = true;
402  blocked_fpu_registers_[S25] = true;
403  blocked_fpu_registers_[S26] = true;
404  blocked_fpu_registers_[S27] = true;
405  blocked_fpu_registers_[S28] = true;
406  blocked_fpu_registers_[S29] = true;
407  blocked_fpu_registers_[S30] = true;
408  blocked_fpu_registers_[S31] = true;
409
410  UpdateBlockedPairRegisters();
411}
412
413void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
414  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
415    ArmManagedRegister current =
416        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
417    if (blocked_core_registers_[current.AsRegisterPairLow()]
418        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
419      blocked_register_pairs_[i] = true;
420    }
421  }
422}
423
// Visitor that emits native code for each HIR instruction; shares the
// assembler owned by `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
428
// Emits the method prologue: optional stack-overflow check, push of
// LR/R6/R7, frame allocation, and store of the current method at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small frames skip the overflow check entirely.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      // Compare SP against the thread's stack end; take the slow path when
      // SP has dropped below it (CC = unsigned lower).
      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load a word below the reserved stack region.
      // A fault here is presumably turned into a StackOverflowError by the
      // fault handler; RecordPcInfo maps the faulting PC.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  // Store R0 at kCurrentMethodStackOffset (R0 holds the current method on
  // entry per the managed calling convention).
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
453
// Emits the method epilogue: deallocates the frame, restores R6/R7 and
// returns by popping the saved return address directly into PC.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}
458
// Binds `block`'s label at the current position in the assembler buffer.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
462
463Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
464  switch (load->GetType()) {
465    case Primitive::kPrimLong:
466    case Primitive::kPrimDouble:
467      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
468      break;
469
470    case Primitive::kPrimInt:
471    case Primitive::kPrimNot:
472    case Primitive::kPrimFloat:
473      return Location::StackSlot(GetStackSlot(load->GetLocal()));
474
475    case Primitive::kPrimBoolean:
476    case Primitive::kPrimByte:
477    case Primitive::kPrimChar:
478    case Primitive::kPrimShort:
479    case Primitive::kPrimVoid:
480      LOG(FATAL) << "Unexpected type " << load->GetType();
481  }
482
483  LOG(FATAL) << "Unreachable";
484  return Location();
485}
486
// Assigns the location of the next method parameter of type `type`,
// advancing the visitor's register/stack cursors according to the ARM
// managed calling convention.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit core values take one argument register, or one stack slot
      // once the registers run out.
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs consume two consecutive core slots. Three outcomes:
      // both words in registers, the pair split between the last register
      // and the stack (QuickParameter), or fully on the stack.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        return Location::QuickParameter(index, stack_index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // At an even boundary, skip over any S registers already claimed by
      // doubles (the double cursor never moves backwards).
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of S registers; align the double
      // cursor up past any single floats already handed out.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
553
554Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
555  switch (type) {
556    case Primitive::kPrimBoolean:
557    case Primitive::kPrimByte:
558    case Primitive::kPrimChar:
559    case Primitive::kPrimShort:
560    case Primitive::kPrimInt:
561    case Primitive::kPrimNot: {
562      return Location::RegisterLocation(R0);
563    }
564
565    case Primitive::kPrimFloat: {
566      return Location::FpuRegisterLocation(S0);
567    }
568
569    case Primitive::kPrimLong: {
570      return Location::RegisterPairLocation(R0, R1);
571    }
572
573    case Primitive::kPrimDouble: {
574      return Location::FpuRegisterPairLocation(S0, S1);
575    }
576
577    case Primitive::kPrimVoid:
578      return Location();
579  }
580  UNREACHABLE();
581  return Location();
582}
583
584void CodeGeneratorARM::Move32(Location destination, Location source) {
585  if (source.Equals(destination)) {
586    return;
587  }
588  if (destination.IsRegister()) {
589    if (source.IsRegister()) {
590      __ Mov(destination.As<Register>(), source.As<Register>());
591    } else if (source.IsFpuRegister()) {
592      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
593    } else {
594      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
595    }
596  } else if (destination.IsFpuRegister()) {
597    if (source.IsRegister()) {
598      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
599    } else if (source.IsFpuRegister()) {
600      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
601    } else {
602      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
603    }
604  } else {
605    DCHECK(destination.IsStackSlot());
606    if (source.IsRegister()) {
607      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
608    } else if (source.IsFpuRegister()) {
609      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
610    } else {
611      DCHECK(source.IsStackSlot());
612      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
613      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
614    }
615  }
616}
617
// Moves a 64-bit value between register pairs, FPU pairs, double stack
// slots and QuickParameter locations. A QuickParameter splits a long
// between a core argument register (low word) and a caller stack slot
// (high word). IP / R0 are used as scratch for memory-to-memory cases.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsQuickParameter()) {
      // Low word from the argument register, high word from the caller's
      // frame (hence the GetFrameSize() adjustment).
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      InvokeDexCallingConvention calling_convention;
      __ Mov(destination.AsRegisterPairLow<Register>(),
             calling_convention.GetRegisterAt(register_index));
      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // R1/R2 cannot be encoded as a single ldrd (which needs an
        // even-numbered first register), so load the words separately.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
    uint16_t stack_index = destination.GetQuickParameterStackIndex();
    if (source.IsRegisterPair()) {
      // Low word into the argument register, high word into the out slot.
      __ Mov(calling_convention.GetRegisterAt(register_index),
             source.AsRegisterPairLow<Register>());
      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // R0 is used as scratch for the high word.
      __ LoadFromOffset(
          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      if (source.AsRegisterPairLow<Register>() == R1) {
        // Same strd encoding restriction as above: store words separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      // Low word from the argument register, high word from the caller's
      // frame; R0 is used as scratch.
      InvokeDexCallingConvention calling_convention;
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
             SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0,
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: copy both words through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  }
}
706
// Materializes the value produced by `instruction` into `location`:
// constants are loaded directly, locals are copied from their stack slot,
// and other instructions are copied from their output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do when the value is already where it is wanted.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (instruction->IsIntConstant()) {
    int32_t value = instruction->AsIntConstant()->GetValue();
    if (location.IsRegister()) {
      __ LoadImmediate(location.As<Register>(), value);
    } else {
      DCHECK(location.IsStackSlot());
      // Materialize through IP, then spill.
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
    } else {
      DCHECK(location.IsDoubleStackSlot());
      // Materialize each 32-bit half through IP, then spill.
      __ LoadImmediate(IP, Low32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      __ LoadImmediate(IP, High32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else {
    // General case: the instruction just ran (or is a temporary); move its
    // output location to the requested one.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
778
// Calls the runtime entry point at `entry_point_offset` in the Thread
// object: loads the target into LR, branches-with-link, and records the
// safepoint PC for `instruction` at `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only these instruction kinds may invoke the runtime from a leaf method.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || !IsLeafMethod());
}
790
// A goto consumes and produces no values, so it needs no location summary.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
794
// Emits an unconditional branch, folding in any pending suspend check and
// eliding the branch when the successor is emitted next.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  // On a loop back edge with a suspend check, the suspend-check code itself
  // branches to the successor, so nothing more is emitted here.
  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check ending the entry block is emitted before the jump.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor immediately follows in code order.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
816
// An exit consumes and produces no values, so it needs no location summary.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
820
// The exit block should never be reached at runtime (returns branch straight
// to the frame exit); in debug builds plant a breakpoint to catch it.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
828
// An HIf only needs an input register when its condition is materialized
// (i.e. the condition's value lives in a register rather than in the flags).
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
837
// Generates the conditional branch for an HIf. Three cases:
//   1. constant condition: branch (or fall through) statically;
//   2. materialized condition: compare the boolean output against 0;
//   3. non-materialized condition: emit the compare here and branch on the
//      condition's flags directly.
// In every case the branch to the false successor is elided when it is the
// next block in emission order.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister())
      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(locations->InAt(0).As<Register>(),
               ShifterOperand(locations->InAt(1).As<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (ShifterOperand::CanHoldArm(value, &operand)) {
          // The constant fits an ARM immediate operand; compare directly.
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
        } else {
          // Constant does not encode as an immediate: materialize it in IP.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
888
889
// Shared locations logic for all HCondition subclasses: first input in a
// register, second register-or-constant, and an output register only when
// the condition's boolean value must be materialized.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
899
// Materializes a condition's boolean value: compares the inputs, then uses a
// Thumb-2 IT block to move 1 or 0 into the output depending on the flags.
// When the condition is not materialized, the compare is emitted by the
// consuming HIf instead (see VisitIf), so there is nothing to do here.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;

  LocationSummary* locations = comp->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmp(locations->InAt(0).As<Register>(),
           ShifterOperand(locations->InAt(1).As<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (ShifterOperand::CanHoldArm(value, &operand)) {
      // Constant encodes as an immediate operand.
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
    } else {
      // Otherwise materialize the constant in the scratch register IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
    }
  }
  // IT <cond> ELSE: the first mov executes when the condition holds (out = 1),
  // the second when it does not (out = 0).
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
925
// All comparisons share VisitCondition's locations logic.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
929
// All comparisons share VisitCondition's code generation.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
933
// Delegates to the shared condition locations logic.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
937
// Delegates to the shared condition code generation.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
941
// Delegates to the shared condition locations logic.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
945
// Delegates to the shared condition code generation.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
949
// Delegates to the shared condition locations logic.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
953
// Delegates to the shared condition code generation.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
957
// Delegates to the shared condition locations logic.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
961
// Delegates to the shared condition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
965
// Delegates to the shared condition locations logic.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
969
// Delegates to the shared condition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
973
// Locals live in stack slots; no register locations needed.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
977
// No code to emit; only sanity-check that locals are declared in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
981
// Loading a local needs no locations; users read the stack slot directly.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
985
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
990
// Pins the value input of a store-local to the local's stack slot, so the
// register allocator materializes the store as part of resolving locations.
// 32-bit types use a single slot, 64-bit types a double slot.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1014
// Nothing to emit: the store happens when locations are resolved.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1018
// Constants are encoded in their location; no register is reserved.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1024
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1029
// Constants are encoded in their location; no register is reserved.
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1035
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1040
// Constants are encoded in their location; no register is reserved.
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1046
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1051
// Constants are encoded in their location; no register is reserved.
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1057
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1062
// A void return has no inputs, hence no locations.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1066
// Emits the frame teardown and return sequence.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1071
// Pins the returned value to the calling convention's return location
// (e.g. R0 / R0_R1 / S0 depending on the type).
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1077
// The value is already in the return register(s); just tear down the frame.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1082
// Static invokes share the common invoke locations logic.
void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}
1086
// Loads the current ArtMethod* (spilled at SP + 0 in the prologue) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1090
// Emits a static call: resolves the callee through the caller's dex cache,
// then branches to its Quick entry point via LR.
void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
  // LR()
  __ blx(LR);

  // Record the call site so stack walking can map PC back to dex.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1118
// Virtual invokes share the common invoke locations logic.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1122
// Common locations logic for invokes: a temp in R0 for the resolved method,
// arguments placed per the dex calling convention, and the output pinned to
// the convention's return location.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
}
1136
1137
// Emits a virtual call: loads the receiver's class, looks the method up in
// the class's embedded vtable, and branches to its Quick entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver was spilled: reload it first.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1162
// Builds register constraints for arithmetic negation. Long negation needs
// the output to overlap-protect against the input (the SBC/SUB sequence
// writes out.hi before reading in.hi); float/double are not implemented yet.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1184
// Emits arithmetic negation. Int uses a single RSB; long uses an
// RSBS/SBC/SUB sequence because Thumb-2 has no RSC instruction.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1225
// Builds register constraints for addition. Long addition requires the
// output to overlap-protect against the inputs (ADDS writes out.lo before
// ADC reads in.hi); FP operands live in FPU registers.
void LocationsBuilderARM::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1251
// Emits addition: ADD/AddConstant for int, ADDS+ADC (carry chain) for long,
// VADD for float/double.
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // AddConstant handles immediates that do not encode directly.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong:
      // Low words first (sets carry), then high words with carry-in.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      // Doubles are held as even/odd S-register pairs; convert to D-register.
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1291
// Builds register constraints for subtraction; mirrors VisitAdd (long
// subtraction needs output overlap protection for the SUBS/SBC chain).
void LocationsBuilderARM::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1315
// Emits subtraction: SUB or add-of-negated-constant for int, SUBS+SBC
// (borrow chain) for long, VSUB for float/double.
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // Subtract a constant by adding its negation.
        // NOTE(review): negating the constant overflows (UB) for kMinInt —
        // confirm RegisterOrConstant never hands such a constant here.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Low words first (sets borrow), then high words with borrow-in.
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Doubles are held as even/odd S-register pairs; convert to D-register.
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1360
// Builds register constraints for multiplication; both operands are always
// in registers (no immediate MUL form is used here).
void LocationsBuilderARM::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:  {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1385
// Emits multiplication: MUL for int, a MUL/MLA/UMULL cross-product sequence
// for long (IP as scratch), VMUL for float/double.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Doubles are held as even/odd S-register pairs; convert to D-register.
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1443
1444void LocationsBuilderARM::VisitDiv(HDiv* div) {
1445  LocationSummary* locations =
1446      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1447  switch (div->GetResultType()) {
1448    case Primitive::kPrimInt:
1449    case Primitive::kPrimLong: {
1450      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1451      break;
1452    }
1453    case Primitive::kPrimFloat:
1454    case Primitive::kPrimDouble: {
1455      locations->SetInAt(0, Location::RequiresFpuRegister());
1456      locations->SetInAt(1, Location::RequiresFpuRegister());
1457      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1458      break;
1459    }
1460
1461    default:
1462      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1463  }
1464}
1465
1466void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1467  LocationSummary* locations = div->GetLocations();
1468  Location out = locations->Out();
1469  Location first = locations->InAt(0);
1470  Location second = locations->InAt(1);
1471
1472  switch (div->GetResultType()) {
1473    case Primitive::kPrimInt:
1474    case Primitive::kPrimLong: {
1475      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1476      break;
1477    }
1478
1479    case Primitive::kPrimFloat: {
1480      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1481      break;
1482    }
1483
1484    case Primitive::kPrimDouble: {
1485      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1486               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1487               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1488      break;
1489    }
1490
1491    default:
1492      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1493  }
1494}
1495
// new-instance calls into the runtime: pin the runtime calling-convention
// argument registers as temps and the result to R0.
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}
1504
// Calls pAllocObjectWithAccessCheck(type_idx, method): type index in the
// first argument register, current method in the second.
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
}
1512
// new-array calls into the runtime: first two convention registers are temps
// (type index, method), the length input goes in the third, result in R0.
void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
1522
// Calls pAllocArrayWithAccessCheck: the length is already in the third
// convention register (pinned by the locations builder).
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
}
1530
// Binds a parameter to its calling-convention location. Stack-passed
// parameters live in the caller's frame, so their slot index is rebased by
// this method's frame size.
void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
1542
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
1547
// Not takes one register input and produces one register output.
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
1554
1555void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1556  LocationSummary* locations = not_->GetLocations();
1557  Location out = locations->Out();
1558  Location in = locations->InAt(0);
1559  switch (not_->InputAt(0)->GetType()) {
1560    case Primitive::kPrimBoolean:
1561      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1562      break;
1563
1564    case Primitive::kPrimInt:
1565      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1566      break;
1567
1568    case Primitive::kPrimLong:
1569      __ mvn(out.AsRegisterPairLow<Register>(),
1570             ShifterOperand(in.AsRegisterPairLow<Register>()));
1571      __ mvn(out.AsRegisterPairHigh<Register>(),
1572             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1573      break;
1574
1575    default:
1576      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1577  }
1578}
1579
// Compare takes both operands in registers and produces -1/0/1 in a register.
void LocationsBuilderARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
1587
1588void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1589  Label greater, done;
1590  LocationSummary* locations = compare->GetLocations();
1591  switch (compare->InputAt(0)->GetType()) {
1592    case Primitive::kPrimLong: {
1593      Register output = locations->Out().As<Register>();
1594      Location left = locations->InAt(0);
1595      Location right = locations->InAt(1);
1596      Label less, greater, done;
1597      __ cmp(left.AsRegisterPairHigh<Register>(),
1598             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1599      __ b(&less, LT);
1600      __ b(&greater, GT);
1601      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect
1602      // the status flags.
1603      __ LoadImmediate(output, 0);
1604      __ cmp(left.AsRegisterPairLow<Register>(),
1605             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
1606      __ b(&done, EQ);
1607      __ b(&less, CC);
1608
1609      __ Bind(&greater);
1610      __ LoadImmediate(output, 1);
1611      __ b(&done);
1612
1613      __ Bind(&less);
1614      __ LoadImmediate(output, -1);
1615
1616      __ Bind(&done);
1617      break;
1618    }
1619    default:
1620      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1621  }
1622}
1623
1624void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1625  LocationSummary* locations =
1626      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1627  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1628    locations->SetInAt(i, Location::Any());
1629  }
1630  locations->SetOut(Location::Any());
1631}
1632
// Phis never generate code directly; they are eliminated by the register
// allocator / parallel-move resolution before code generation.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
1637
1638void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1639  LocationSummary* locations =
1640      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1641  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
1642  locations->SetInAt(0, Location::RequiresRegister());
1643  locations->SetInAt(1, Location::RequiresRegister());
1644  // Temporary registers for the write barrier.
1645  if (is_object_type) {
1646    locations->AddTemp(Location::RequiresRegister());
1647    locations->AddTemp(Location::RequiresRegister());
1648  }
1649}
1650
// Emits a store of a value into an instance field, dispatching on the field
// type to pick the store width. Reference stores also emit the GC card mark.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, obj, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, obj, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: dirty the GC card using the two temps reserved
        // by the locations builder.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store: StoreToOffset with kStoreWordPair takes the low
      // register of the pair and stores both halves.
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
1699
1700void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1701  LocationSummary* locations =
1702      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1703  locations->SetInAt(0, Location::RequiresRegister());
1704  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1705}
1706
// Emits a load of an instance field, dispatching on the field type to pick
// the load width and sign/zero extension.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extended 8-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extended 8-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extended 16-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extended 16-bit load (Java char is unsigned).
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadWord, out, obj, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load into a register pair; kLoadWordPair takes the low
      // register and fills both halves.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1760
1761void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
1762  LocationSummary* locations =
1763      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1764  locations->SetInAt(0, Location::RequiresRegister());
1765  if (instruction->HasUses()) {
1766    locations->SetOut(Location::SameAsFirstInput());
1767  }
1768}
1769
// Emits an explicit null check: compares the object against null and
// branches to a slow path (which throws) when it is null.
void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmp(obj.As<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
  } else {
    // A constant input can only be the null constant: the check always
    // fails, so branch to the slow path unconditionally.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ b(slow_path->GetEntryLabel());
  }
}
1786
1787void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
1788  LocationSummary* locations =
1789      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1790  locations->SetInAt(0, Location::RequiresRegister());
1791  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1792  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1793}
1794
// Emits an array element load. For a constant index the full offset is
// computed at compile time; otherwise the scaled index is added into the
// scratch register IP and the element is loaded from there.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // Scale the index by the element size (LSL #1).
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded as plain 32-bit words (no read barrier here).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1890
1891void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
1892  Primitive::Type value_type = instruction->GetComponentType();
1893  bool is_object = value_type == Primitive::kPrimNot;
1894  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1895      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1896  if (is_object) {
1897    InvokeRuntimeCallingConvention calling_convention;
1898    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1899    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1900    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1901  } else {
1902    locations->SetInAt(0, Location::RequiresRegister());
1903    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1904    locations->SetInAt(2, Location::RequiresRegister());
1905  }
1906}
1907
1908void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
1909  LocationSummary* locations = instruction->GetLocations();
1910  Register obj = locations->InAt(0).As<Register>();
1911  Location index = locations->InAt(1);
1912  Primitive::Type value_type = instruction->GetComponentType();
1913
1914  switch (value_type) {
1915    case Primitive::kPrimBoolean:
1916    case Primitive::kPrimByte: {
1917      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1918      Register value = locations->InAt(2).As<Register>();
1919      if (index.IsConstant()) {
1920        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1921        __ StoreToOffset(kStoreByte, value, obj, offset);
1922      } else {
1923        __ add(IP, obj, ShifterOperand(index.As<Register>()));
1924        __ StoreToOffset(kStoreByte, value, IP, data_offset);
1925      }
1926      break;
1927    }
1928
1929    case Primitive::kPrimShort:
1930    case Primitive::kPrimChar: {
1931      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1932      Register value = locations->InAt(2).As<Register>();
1933      if (index.IsConstant()) {
1934        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1935        __ StoreToOffset(kStoreHalfword, value, obj, offset);
1936      } else {
1937        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
1938        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
1939      }
1940      break;
1941    }
1942
1943    case Primitive::kPrimInt: {
1944      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1945      Register value = locations->InAt(2).As<Register>();
1946      if (index.IsConstant()) {
1947        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1948        __ StoreToOffset(kStoreWord, value, obj, offset);
1949      } else {
1950        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
1951        __ StoreToOffset(kStoreWord, value, IP, data_offset);
1952      }
1953      break;
1954    }
1955
1956    case Primitive::kPrimNot: {
1957      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
1958      break;
1959    }
1960
1961    case Primitive::kPrimLong: {
1962      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1963      Location value = locations->InAt(2);
1964      if (index.IsConstant()) {
1965        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1966        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
1967      } else {
1968        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
1969        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
1970      }
1971      break;
1972    }
1973
1974    case Primitive::kPrimFloat:
1975    case Primitive::kPrimDouble:
1976      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1977      UNREACHABLE();
1978    case Primitive::kPrimVoid:
1979      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1980      UNREACHABLE();
1981  }
1982}
1983
1984void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
1985  LocationSummary* locations =
1986      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1987  locations->SetInAt(0, Location::RequiresRegister());
1988  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1989}
1990
1991void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
1992  LocationSummary* locations = instruction->GetLocations();
1993  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
1994  Register obj = locations->InAt(0).As<Register>();
1995  Register out = locations->Out().As<Register>();
1996  __ LoadFromOffset(kLoadWord, out, obj, offset);
1997}
1998
1999void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2000  LocationSummary* locations =
2001      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2002  locations->SetInAt(0, Location::RequiresRegister());
2003  locations->SetInAt(1, Location::RequiresRegister());
2004  if (instruction->HasUses()) {
2005    locations->SetOut(Location::SameAsFirstInput());
2006  }
2007}
2008
2009void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2010  LocationSummary* locations = instruction->GetLocations();
2011  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2012      instruction, locations->InAt(0), locations->InAt(1));
2013  codegen_->AddSlowPath(slow_path);
2014
2015  Register index = locations->InAt(0).As<Register>();
2016  Register length = locations->InAt(1).As<Register>();
2017
2018  __ cmp(index, ShifterOperand(length));
2019  __ b(slow_path->GetEntryLabel(), CS);
2020}
2021
2022void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
2023  Label is_null;
2024  __ CompareAndBranchIfZero(value, &is_null);
2025  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
2026  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
2027  __ strb(card, Address(card, temp));
2028  __ Bind(&is_null);
2029}
2030
// Temporaries carry no location summary; the code generator manages them.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
2034
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
2039
// Parallel moves are inserted after location building, so the builder must
// never see one.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2044
// Delegates emission of the move set to the parallel-move resolver, which
// orders the moves and breaks cycles.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
2048
// A suspend check only calls the runtime on its slow path.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
2052
2053void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2054  HBasicBlock* block = instruction->GetBlock();
2055  if (block->GetLoopInformation() != nullptr) {
2056    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2057    // The back edge will generate the suspend check.
2058    return;
2059  }
2060  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2061    // The goto will generate the suspend check.
2062    return;
2063  }
2064  GenerateSuspendCheck(instruction, nullptr);
2065}
2066
// Emits a suspend check. When `successor` is null the check falls through
// on the fast path; otherwise the fast path branches to `successor` and the
// slow path is entered unconditionally when suspension is requested.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // R4 is used as a countdown; the slow path runs when it reaches zero.
  // NOTE(review): R4's initialization/reload is outside this function —
  // presumably handled in the prologue and the slow path; confirm.
  __ subs(R4, R4, ShifterOperand(1));
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), EQ);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), NE);
    __ b(slow_path->GetEntryLabel());
  }
}
2082
// The resolver emits through the code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
2086
// Emits one move of the parallel-move set. Handles register/stack-slot/
// int-constant sources and register/stack-slot destinations; IP serves as
// the scratch register for memory-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.As<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
                        SP, source.GetStackIndex());
    } else {
      // Stack-to-stack move goes through IP.
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    // Only 32-bit integer constants are supported as immediate sources.
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.As<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
2122
// Swaps a register with a stack slot, using IP to hold the register value.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
2128
// Swaps two stack slots. Needs two scratch registers: IP plus one more
// obtained through ScratchRegisterScope; if that scope has to spill a
// register, the push shifts SP, so the slot offsets are adjusted by a word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
2139
// Emits a swap for move `index`, used to break cycles in the parallel-move
// graph. Supports register<->register and register/stack-slot combinations.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // IP is the swap scratch, so neither operand may be IP.
    DCHECK_NE(source.As<Register>(), IP);
    DCHECK_NE(destination.As<Register>(), IP);
    __ Mov(IP, source.As<Register>());
    __ Mov(source.As<Register>(), destination.As<Register>());
    __ Mov(destination.As<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.As<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.As<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}
2161
// Spills a scratch register on the stack so Exchange() can borrow it.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
2165
// Restores a scratch register previously spilled by SpillScratch().
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
2169
2170void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2171  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2172      ? LocationSummary::kCallOnSlowPath
2173      : LocationSummary::kNoCall;
2174  LocationSummary* locations =
2175      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2176  locations->SetOut(Location::RequiresRegister());
2177}
2178
// Emits code to load a class object. The referrer's own class is read
// straight from the current ArtMethod; any other class goes through the dex
// cache with a slow path for resolution and (optionally) initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    // The declaring class is always loaded; no runtime call can happen.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current_method->dex_cache_resolved_types_[type_index].
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // Null cache entry means the class is unresolved: take the slow path.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2205
2206void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2207  LocationSummary* locations =
2208      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2209  locations->SetInAt(0, Location::RequiresRegister());
2210  if (check->HasUses()) {
2211    locations->SetOut(Location::SameAsFirstInput());
2212  }
2213}
2214
// Emits an explicit class initialization check; the slow path runs the
// class initializer if needed.
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
}
2222
// Emits the status check shared by clinit checks and class loads: branch to
// `slow_path` unless the class status is at least kStatusInitialized, then
// issue a memory barrier and bind the slow path's exit label.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
2233
2234void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2235  LocationSummary* locations =
2236      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2237  locations->SetInAt(0, Location::RequiresRegister());
2238  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2239}
2240
// Emits a load of a static field from its declaring class object,
// dispatching on the field type to pick the load width and extension.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extended 8-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extended 8-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extended 16-bit load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extended 16-bit load (Java char is unsigned).
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadWord, out, cls, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load into a register pair via the low register.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2294
2295void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2296  LocationSummary* locations =
2297      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2298  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2299  locations->SetInAt(0, Location::RequiresRegister());
2300  locations->SetInAt(1, Location::RequiresRegister());
2301  // Temporary registers for the write barrier.
2302  if (is_object_type) {
2303    locations->AddTemp(Location::RequiresRegister());
2304    locations->AddTemp(Location::RequiresRegister());
2305  }
2306}
2307
// Emits a store of a value into a static field of its declaring class,
// dispatching on the field type. Reference stores also mark the GC card.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, cls, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, cls, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: dirty the card for the class object using the
        // temps reserved by the locations builder.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store via the low register of the pair.
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2356
2357void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2358  LocationSummary* locations =
2359      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2360  locations->SetOut(Location::RequiresRegister());
2361}
2362
// Emits code to load a string from the current method's dex cache; a null
// cache entry takes the slow path, which resolves the string.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current_method->dex_cache_strings_[string_index].
  Register out = load->GetLocations()->Out().As<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(
      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
2376
2377}  // namespace arm
2378}  // namespace art
2379