code_generator_arm.cc revision 3dbcb38a8b2237b0da290ae35dc0caab3cb47b3d
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
// When true, an explicit compare-and-branch stack overflow check is emitted at
// method entry; otherwise an implicit probe-load is used (see
// CodeGeneratorARM::GenerateFrameEntry).
static constexpr bool kExplicitStackOverflowCheck = false;

// LR plus the two callee-saves (R6, R7) pushed by GenerateFrameEntry.
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// The current method is stored at the bottom of the frame (SP + 0); see the
// StoreToOffset at the end of GenerateFrameEntry.
static constexpr int kCurrentMethodStackOffset = 0;

// Core registers used to pass arguments when calling into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// No FPU registers are used for runtime-call arguments.
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
// Calling convention used when calling into the runtime: arguments go in
// R0-R2 and no FPU argument registers are used.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
61
// In the slow-path classes below, `__` emits through the codegen argument's
// assembler; the definition is swapped after the slow paths to use the
// enclosing code generator's own assembler.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Byte offset of a quick entrypoint inside the Thread object, for ARM word size.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
// Base class for ARM slow paths. Provides the entry label (bound where the
// out-of-line code begins) and the exit label (branched to when resuming the
// fast path).
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
78
// Slow path for HNullCheck: calls the runtime to throw a NullPointerException.
// No branch back to the exit label is emitted — the throw entrypoint
// presumably does not return.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
94
// Slow path for HDivZeroCheck: calls the runtime to throw an
// ArithmeticException. Like the null check, it never branches back.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
110
// Slow path for the explicit stack overflow check: jumps straight to the
// pThrowStackOverflow entrypoint by loading its address into PC (no blx, so
// LR is left untouched for the stack trace).
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};
124
// Slow path for HSuspendCheck: saves the live registers, calls pTestSuspend,
// restores them, then either returns to the point of the check or branches
// to the given successor block (used for loop back edges).
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call can suspend the thread; everything live must be spilled.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    // Only meaningful when the slow path resumes at the check site.
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
159
// Slow path for HBoundsCheck: moves the offending index and the array length
// into the first two runtime argument registers, then calls the runtime to
// throw an ArrayIndexOutOfBoundsException. Never branches back.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // arg0 = index, arg1 = length, per the runtime calling convention.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
188
// Slow path shared by HLoadClass and HClinitCheck: calls the runtime to
// resolve the type (or initialize its static storage), moves the result into
// the instruction's output location, and resumes the fast path.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call: arg0 = type index, arg1 = the referring method.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    if (locations->Out().IsValid()) {
      // The output register must not be among the saved live registers,
      // otherwise RestoreLiveRegisters below would clobber the result.
      DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
239
// Slow path for HLoadString: calls the runtime to resolve the string, moves
// the result from R0 into the instruction's output, and resumes the fast path.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be saved/restored below, or the restore
    // would clobber the resolved string.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call: arg0 = the referring method, arg1 = string index.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
268
#undef __
// From here on, `__` emits through this code generator's own assembler.
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
273
274inline Condition ARMCondition(IfCondition cond) {
275  switch (cond) {
276    case kCondEQ: return EQ;
277    case kCondNE: return NE;
278    case kCondLT: return LT;
279    case kCondLE: return LE;
280    case kCondGT: return GT;
281    case kCondGE: return GE;
282    default:
283      LOG(FATAL) << "Unknown if condition";
284  }
285  return EQ;        // Unreachable.
286}
287
288inline Condition ARMOppositeCondition(IfCondition cond) {
289  switch (cond) {
290    case kCondEQ: return NE;
291    case kCondNE: return EQ;
292    case kCondLT: return GE;
293    case kCondLE: return GT;
294    case kCondGT: return LE;
295    case kCondGE: return LT;
296    default:
297      LOG(FATAL) << "Unknown if condition";
298  }
299  return EQ;        // Unreachable.
300}
301
// Prints the name of core register `reg` for debugging output.
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
305
// Prints the name of S register `reg` for debugging output.
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
309
// Spills core register `reg_id` to the stack at SP + `stack_index`.
// Returns the number of bytes the spill occupies.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
314
// Reloads core register `reg_id` from the stack at SP + `stack_index`.
// Returns the number of bytes the spill occupied.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
319
// Constructs the ARM code generator with this backend's register counts and
// its helper visitors/resolver, all allocated on the graph's arena.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      // NOTE(review): confirm what the boolean argument selects in
      // ArmAssembler's constructor — it is not evident from this file.
      assembler_(true) {}
327
// Bytes occupied at the top of the frame by the registers pushed in
// GenerateFrameEntry (LR, R6, R7).
size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}
331
// Picks a free register (or register pair) suitable for `type` and marks it
// blocked, keeping the single-register and register-pair blocked sets
// consistent with each other.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Grab a free pair, then block both halves individually so single
      // 32-bit allocations cannot take them.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting on an even index
      // (they are accessed as a D register, see FromLowSToD).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
382
383void CodeGeneratorARM::SetupBlockedRegisters() const {
384  // Don't allocate the dalvik style register pair passing.
385  blocked_register_pairs_[R1_R2] = true;
386
387  // Stack register, LR and PC are always reserved.
388  blocked_core_registers_[SP] = true;
389  blocked_core_registers_[LR] = true;
390  blocked_core_registers_[PC] = true;
391
392  // Reserve thread register.
393  blocked_core_registers_[TR] = true;
394
395  // Reserve temp register.
396  blocked_core_registers_[IP] = true;
397
398  // TODO: We currently don't use Quick's callee saved registers.
399  // We always save and restore R6 and R7 to make sure we can use three
400  // register pairs for long operations.
401  blocked_core_registers_[R4] = true;
402  blocked_core_registers_[R5] = true;
403  blocked_core_registers_[R8] = true;
404  blocked_core_registers_[R10] = true;
405  blocked_core_registers_[R11] = true;
406
407  blocked_fpu_registers_[S16] = true;
408  blocked_fpu_registers_[S17] = true;
409  blocked_fpu_registers_[S18] = true;
410  blocked_fpu_registers_[S19] = true;
411  blocked_fpu_registers_[S20] = true;
412  blocked_fpu_registers_[S21] = true;
413  blocked_fpu_registers_[S22] = true;
414  blocked_fpu_registers_[S23] = true;
415  blocked_fpu_registers_[S24] = true;
416  blocked_fpu_registers_[S25] = true;
417  blocked_fpu_registers_[S26] = true;
418  blocked_fpu_registers_[S27] = true;
419  blocked_fpu_registers_[S28] = true;
420  blocked_fpu_registers_[S29] = true;
421  blocked_fpu_registers_[S30] = true;
422  blocked_fpu_registers_[S31] = true;
423
424  UpdateBlockedPairRegisters();
425}
426
427void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
428  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
429    ArmManagedRegister current =
430        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
431    if (blocked_core_registers_[current.AsRegisterPairLow()]
432        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
433      blocked_register_pairs_[i] = true;
434    }
435  }
436}
437
// Instruction visitor that emits ARM code through the owning code
// generator's assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
442
// Emits the method prologue: an optional stack overflow check, the push of
// LR/R6/R7, allocation of the rest of the frame, and the store of the
// current method at SP + 0. Emission order matters throughout.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Small leaf frames cannot overflow past the reserved guard area.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      // Explicit check: compare SP against the thread's stack end and branch
      // to a throwing slow path when below it.
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load below SP. On overflow the load faults, and
      // the PC recorded here lets the runtime attribute the fault to this
      // method entry.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  // Store the current method (in R0 on entry, per the managed calling
  // convention) at the bottom of the frame (kCurrentMethodStackOffset).
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
467
// Emits the method epilogue: deallocates the frame, restores R6/R7, and
// returns by popping the saved LR directly into PC.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}
472
// Binds `block`'s label at the current assembler position so branches to the
// block resolve here.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
476
477Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
478  switch (load->GetType()) {
479    case Primitive::kPrimLong:
480    case Primitive::kPrimDouble:
481      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
482      break;
483
484    case Primitive::kPrimInt:
485    case Primitive::kPrimNot:
486    case Primitive::kPrimFloat:
487      return Location::StackSlot(GetStackSlot(load->GetLocal()));
488
489    case Primitive::kPrimBoolean:
490    case Primitive::kPrimByte:
491    case Primitive::kPrimChar:
492    case Primitive::kPrimShort:
493    case Primitive::kPrimVoid:
494      LOG(FATAL) << "Unexpected type " << load->GetType();
495  }
496
497  LOG(FATAL) << "Unreachable";
498  return Location();
499}
500
// Computes the location of the next method parameter of the given type and
// advances the visitor's register/stack cursors. Follows the ARM managed
// calling convention: 32-bit core values in GP registers then stack; longs in
// register pairs (possibly split register/stack as a QuickParameter); floats
// and doubles in S registers with doubles aligned to even indices and floats
// back-filling the odd slots doubles leave behind.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs consume two GP register slots and two stack slots.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in registers: use the register pair.
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Only one register left: low half in the register, high half on the
        // stack (a "quick parameter").
        return Location::QuickParameter(index, stack_index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // An odd float_index_ marks a single S register left free by a double's
      // even alignment; use it first. On an even boundary, skip past the
      // registers already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles must start on an even S register (a D register boundary).
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
567
568Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
569  switch (type) {
570    case Primitive::kPrimBoolean:
571    case Primitive::kPrimByte:
572    case Primitive::kPrimChar:
573    case Primitive::kPrimShort:
574    case Primitive::kPrimInt:
575    case Primitive::kPrimNot: {
576      return Location::RegisterLocation(R0);
577    }
578
579    case Primitive::kPrimFloat: {
580      return Location::FpuRegisterLocation(S0);
581    }
582
583    case Primitive::kPrimLong: {
584      return Location::RegisterPairLocation(R0, R1);
585    }
586
587    case Primitive::kPrimDouble: {
588      return Location::FpuRegisterPairLocation(S0, S1);
589    }
590
591    case Primitive::kPrimVoid:
592      return Location();
593  }
594  UNREACHABLE();
595  return Location();
596}
597
// Emits a 32-bit move between any two of: core register, S register, and
// stack slot. Stack-to-stack moves go through the scratch register IP.
// No-op when source and destination are the same location.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
    } else {
      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot());
      // Memory-to-memory: bounce through the scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
631
// Emits a 64-bit move between any two of: core register pair, FPU register
// pair (accessed as a D register), quick parameter (low half in a register,
// high half in the caller's frame), and double stack slot. Uses IP/R0 as
// scratch where a memory-to-memory move is required. No-op when source and
// destination are equal.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsQuickParameter()) {
      // Low half is in an argument register; the high half sits in the
      // caller's frame, hence the GetFrameSize() adjustment.
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      InvokeDexCallingConvention calling_convention;
      __ Mov(destination.AsRegisterPairLow<Register>(),
             calling_convention.GetRegisterAt(register_index));
      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is special-cased with two single-word loads — presumably
        // because a word-pair load cannot target this pair; confirm against
        // the assembler's ldrd constraints.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else if (destination.IsQuickParameter()) {
    // Destination is split: low half goes to an argument register, high half
    // to this method's outgoing argument area.
    InvokeDexCallingConvention calling_convention;
    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
    uint16_t stack_index = destination.GetQuickParameterStackIndex();
    if (source.IsRegisterPair()) {
      __ Mov(calling_convention.GetRegisterAt(register_index),
             source.AsRegisterPairLow<Register>());
      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ LoadFromOffset(
          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
      // R0 serves as scratch for the stack-to-stack half of the move.
      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      if (source.AsRegisterPairLow<Register>() == R1) {
        // Same R1_R2 special case as the load path above.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      // Low half from the argument register; high half from the caller's
      // frame (hence GetFrameSize()), staged through R0.
      InvokeDexCallingConvention calling_convention;
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
             SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0,
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Memory-to-memory: move each word through the scratch register IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  }
}
720
// Moves the value produced by `instruction` into `location` for use by
// `move_for`: materializes int/long constants, loads locals from their stack
// slots, or forwards the instruction's own output location. No-op when the
// value is already in place.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (instruction->IsIntConstant()) {
    int32_t value = instruction->AsIntConstant()->GetValue();
    if (location.IsRegister()) {
      __ LoadImmediate(location.As<Register>(), value);
    } else {
      DCHECK(location.IsStackSlot());
      // Materialize in the scratch register, then spill.
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
    } else {
      DCHECK(location.IsDoubleStackSlot());
      // Spill each 32-bit half via the scratch register.
      __ LoadImmediate(IP, Low32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      __ LoadImmediate(IP, High32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else {
    // The instruction's result is only forwarded to its direct user (or a
    // temporary); move it from its output location.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
792
// Calls a runtime entrypoint: loads its address from the current Thread at
// `entry_point_offset` into LR, branches there with blx, and records the PC
// immediately after the call so the runtime can map it back to `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Leaf methods may only reach the runtime through these implicit checks.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || !IsLeafMethod());
}
805
// A goto has no operands and no output.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
809
// Emits an unconditional branch to the successor, folding in suspend checks:
// a loop back edge with a suspend check jumps through the suspend-check code
// instead, and a goto out of the entry block honors a preceding suspend check.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    // The suspend check branches to the successor itself; no fall-through
    // branch is needed here.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Elide the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
831
// An exit needs no locations; it generates no code in release builds.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
835
836void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
837  UNUSED(exit);
838  if (kIsDebugBuild) {
839    __ Comment("Unreachable");
840    __ bkpt(0);
841  }
842}
843
844void LocationsBuilderARM::VisitIf(HIf* if_instr) {
845  LocationSummary* locations =
846      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
847  HInstruction* cond = if_instr->InputAt(0);
848  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
849    locations->SetInAt(0, Location::RequiresRegister());
850  }
851}
852
// Generates the branch(es) for an HIf. Three cases:
//  1. Constant condition: emit an unconditional jump (or nothing).
//  2. Materialized condition: compare the boolean output against 0.
//  3. Non-materialized HCondition: emit the compare here and branch on its
//     condition code directly.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0.
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(locations->InAt(0).As<Register>(),
               ShifterOperand(locations->InAt(1).As<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // CanHoldArm is used only as an encodability test; the operand used
        // below is rebuilt from the raw value.
        if (ShifterOperand::CanHoldArm(value, &operand)) {
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
        } else {
          // Constant not encodable as an immediate: materialize it in IP.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Fall through to, or branch to, the false successor.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
903
904
905void LocationsBuilderARM::VisitCondition(HCondition* comp) {
906  LocationSummary* locations =
907      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
908  locations->SetInAt(0, Location::RequiresRegister());
909  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
910  if (comp->NeedsMaterialization()) {
911    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
912  }
913}
914
// Materializes the comparison into a 0/1 register value when needed;
// otherwise the branch consuming it emits the compare itself (see VisitIf).
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;

  LocationSummary* locations = comp->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmp(locations->InAt(0).As<Register>(),
           ShifterOperand(locations->InAt(1).As<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (ShifterOperand::CanHoldArm(value, &operand)) {
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
    } else {
      // Constant not encodable as an immediate operand: load it into IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
    }
  }
  // IT block: move 1 if the condition holds, else 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
940
// All concrete comparison visitors delegate to the shared HCondition
// handling: only the condition code differs between them, and that is read
// from the instruction itself in VisitCondition.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
988
// Locals and local loads need no locations and emit no code: accesses are
// resolved through the local's stack slot by the surrounding instructions.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1005
1006void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1007  LocationSummary* locations =
1008      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1009  switch (store->InputAt(1)->GetType()) {
1010    case Primitive::kPrimBoolean:
1011    case Primitive::kPrimByte:
1012    case Primitive::kPrimChar:
1013    case Primitive::kPrimShort:
1014    case Primitive::kPrimInt:
1015    case Primitive::kPrimNot:
1016    case Primitive::kPrimFloat:
1017      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1018      break;
1019
1020    case Primitive::kPrimLong:
1021    case Primitive::kPrimDouble:
1022      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1023      break;
1024
1025    default:
1026      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1027  }
1028}
1029
// Nothing to emit: the store is expressed as a stack-slot input constraint
// by the locations builder.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1033
// Constants of every primitive type are given a ConstantLocation and emit no
// code of their own: each use site materializes the value as needed.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1077
// A void return only generates the frame exit.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

// A value return constrains its input to the calling convention's return
// location, so no move is needed before the frame exit.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1097
// Static invokes share the generic invoke location setup.
void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

// Loads the current method from its reserved stack slot
// (SP + kCurrentMethodStackOffset).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1105
// Generates an invoke-static: the target is resolved through the caller's
// dex cache and called via its quick entry point. Uses the temp register
// reserved by HandleInvoke and clobbers LR.
void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
  // Call through LR.
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1133
// Virtual invokes share the generic invoke location setup.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1137
1138void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1139  LocationSummary* locations =
1140      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1141  locations->AddTemp(Location::RegisterLocation(R0));
1142
1143  InvokeDexCallingConventionVisitor calling_convention_visitor;
1144  for (size_t i = 0; i < invoke->InputCount(); i++) {
1145    HInstruction* input = invoke->InputAt(i);
1146    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1147  }
1148
1149  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1150}
1151
1152
// Generates an invoke-virtual: load the receiver's class, fetch the target
// method from the class's embedded vtable, and call its quick entry point.
// Clobbers the reserved temp register and LR.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver was spilled: reload it before dereferencing.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1177
1178void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1179  LocationSummary* locations =
1180      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1181  switch (neg->GetResultType()) {
1182    case Primitive::kPrimInt:
1183    case Primitive::kPrimLong: {
1184      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1185      locations->SetInAt(0, Location::RequiresRegister());
1186      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1187      break;
1188    }
1189
1190    case Primitive::kPrimFloat:
1191    case Primitive::kPrimDouble:
1192      locations->SetInAt(0, Location::RequiresFpuRegister());
1193      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1194      break;
1195
1196    default:
1197      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1198  }
1199}
1200
// Generates arithmetic negation (out = -in). The 64-bit case computes
// out = 0 - in with RSBS on the low word and an SBC/SUB pair on the high
// word, because Thumb-2 has no RSC instruction (details inline).
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1247
// Builds locations for HTypeConversion. Only integral-to-long conversions
// are supported so far; all other combinations abort (either "not yet
// implemented" for plausible conversions or "unexpected" for invalid ones).
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // int-to-long conversion.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1288
// Generates the conversions declared supported by the locations builder
// above: integral-to-long moves the value into the low word and sign-extends
// into the high word with an arithmetic shift by 31.
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // int-to-long conversion.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1335
1336void LocationsBuilderARM::VisitAdd(HAdd* add) {
1337  LocationSummary* locations =
1338      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1339  switch (add->GetResultType()) {
1340    case Primitive::kPrimInt:
1341    case Primitive::kPrimLong: {
1342      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1343      locations->SetInAt(0, Location::RequiresRegister());
1344      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1345      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1346      break;
1347    }
1348
1349    case Primitive::kPrimFloat:
1350    case Primitive::kPrimDouble: {
1351      locations->SetInAt(0, Location::RequiresFpuRegister());
1352      locations->SetInAt(1, Location::RequiresFpuRegister());
1353      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1354      break;
1355    }
1356
1357    default:
1358      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1359  }
1360}
1361
// Generates addition. The 64-bit case pairs ADDS (low words, setting the
// carry flag) with ADC (high words, consuming it).
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // Constant operand: AddConstant picks an encodable instruction form.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong:
      // ADDS sets the carry flag, ADC consumes it; the pair must stay
      // adjacent and in this order.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1401
1402void LocationsBuilderARM::VisitSub(HSub* sub) {
1403  LocationSummary* locations =
1404      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1405  switch (sub->GetResultType()) {
1406    case Primitive::kPrimInt:
1407    case Primitive::kPrimLong: {
1408      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1409      locations->SetInAt(0, Location::RequiresRegister());
1410      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1411      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1412      break;
1413    }
1414    case Primitive::kPrimFloat:
1415    case Primitive::kPrimDouble: {
1416      locations->SetInAt(0, Location::RequiresFpuRegister());
1417      locations->SetInAt(1, Location::RequiresFpuRegister());
1418      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1419      break;
1420    }
1421    default:
1422      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1423  }
1424}
1425
// Generates subtraction. A constant second operand is handled by adding its
// negation; the 64-bit case pairs SUBS (low words, setting the carry/borrow
// flag) with SBC (high words, consuming it).
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // NOTE(review): negating the constant overflows (UB) for
        // INT32_MIN; confirm such constants cannot reach this path.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // SUBS sets the borrow flag, SBC consumes it; the pair must stay
      // adjacent and in this order.
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1470
1471void LocationsBuilderARM::VisitMul(HMul* mul) {
1472  LocationSummary* locations =
1473      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1474  switch (mul->GetResultType()) {
1475    case Primitive::kPrimInt:
1476    case Primitive::kPrimLong:  {
1477      locations->SetInAt(0, Location::RequiresRegister());
1478      locations->SetInAt(1, Location::RequiresRegister());
1479      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1480      break;
1481    }
1482
1483    case Primitive::kPrimFloat:
1484    case Primitive::kPrimDouble: {
1485      locations->SetInAt(0, Location::RequiresFpuRegister());
1486      locations->SetInAt(1, Location::RequiresFpuRegister());
1487      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1488      break;
1489    }
1490
1491    default:
1492      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1493  }
1494}
1495
// Generates multiplication. The 64-bit case builds the product from three
// 32x32 multiplies (MUL/MLA for the cross terms, UMULL for the low product);
// the instruction order is significant because IP is reused (see inline
// comments for the algebra).
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1553
1554void LocationsBuilderARM::VisitDiv(HDiv* div) {
1555  LocationSummary* locations =
1556      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1557  switch (div->GetResultType()) {
1558    case Primitive::kPrimInt: {
1559      locations->SetInAt(0, Location::RequiresRegister());
1560      locations->SetInAt(1, Location::RequiresRegister());
1561      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1562      break;
1563    }
1564    case Primitive::kPrimLong: {
1565      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1566      break;
1567    }
1568    case Primitive::kPrimFloat:
1569    case Primitive::kPrimDouble: {
1570      locations->SetInAt(0, Location::RequiresFpuRegister());
1571      locations->SetInAt(1, Location::RequiresFpuRegister());
1572      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1573      break;
1574    }
1575
1576    default:
1577      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1578  }
1579}
1580
1581void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1582  LocationSummary* locations = div->GetLocations();
1583  Location out = locations->Out();
1584  Location first = locations->InAt(0);
1585  Location second = locations->InAt(1);
1586
1587  switch (div->GetResultType()) {
1588    case Primitive::kPrimInt: {
1589      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1590      break;
1591    }
1592
1593    case Primitive::kPrimLong: {
1594      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1595      break;
1596    }
1597
1598    case Primitive::kPrimFloat: {
1599      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1600      break;
1601    }
1602
1603    case Primitive::kPrimDouble: {
1604      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1605               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1606               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1607      break;
1608    }
1609
1610    default:
1611      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1612  }
1613}
1614
1615void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1616  LocationSummary* locations =
1617      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1618  locations->SetInAt(0, Location::RequiresRegister());
1619  if (instruction->HasUses()) {
1620    locations->SetOut(Location::SameAsFirstInput());
1621  }
1622}
1623
1624void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1625  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1626  codegen_->AddSlowPath(slow_path);
1627
1628  LocationSummary* locations = instruction->GetLocations();
1629  Location value = locations->InAt(0);
1630
1631  DCHECK(value.IsRegister()) << value;
1632  __ cmp(value.As<Register>(), ShifterOperand(0));
1633  __ b(slow_path->GetEntryLabel(), EQ);
1634}
1635
1636void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1637  LocationSummary* locations =
1638      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1639  InvokeRuntimeCallingConvention calling_convention;
1640  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1641  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1642  locations->SetOut(Location::RegisterLocation(R0));
1643}
1644
1645void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
1646  InvokeRuntimeCallingConvention calling_convention;
1647  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1648  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1649  codegen_->InvokeRuntime(
1650      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1651}
1652
1653void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1654  LocationSummary* locations =
1655      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1656  InvokeRuntimeCallingConvention calling_convention;
1657  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1658  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1659  locations->SetOut(Location::RegisterLocation(R0));
1660  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1661}
1662
1663void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1664  InvokeRuntimeCallingConvention calling_convention;
1665  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1666  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1667  codegen_->InvokeRuntime(
1668      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1669}
1670
1671void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1672  LocationSummary* locations =
1673      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1674  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1675  if (location.IsStackSlot()) {
1676    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1677  } else if (location.IsDoubleStackSlot()) {
1678    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1679  }
1680  locations->SetOut(location);
1681}
1682
1683void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
1684  // Nothing to do, the parameter is already at its location.
1685  UNUSED(instruction);
1686}
1687
1688void LocationsBuilderARM::VisitNot(HNot* not_) {
1689  LocationSummary* locations =
1690      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1691  locations->SetInAt(0, Location::RequiresRegister());
1692  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1693}
1694
1695void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1696  LocationSummary* locations = not_->GetLocations();
1697  Location out = locations->Out();
1698  Location in = locations->InAt(0);
1699  switch (not_->InputAt(0)->GetType()) {
1700    case Primitive::kPrimBoolean:
1701      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1702      break;
1703
1704    case Primitive::kPrimInt:
1705      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1706      break;
1707
1708    case Primitive::kPrimLong:
1709      __ mvn(out.AsRegisterPairLow<Register>(),
1710             ShifterOperand(in.AsRegisterPairLow<Register>()));
1711      __ mvn(out.AsRegisterPairHigh<Register>(),
1712             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1713      break;
1714
1715    default:
1716      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1717  }
1718}
1719
1720void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1721  LocationSummary* locations =
1722      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1723  locations->SetInAt(0, Location::RequiresRegister());
1724  locations->SetInAt(1, Location::RequiresRegister());
1725  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1726}
1727
// Materializes a three-way comparison into -1 / 0 / 1.
// Only the 64-bit (long) variant is implemented here.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      Register output = locations->Out().As<Register>();
      Location left = locations->InAt(0);
      Location right = locations->InAt(1);
      Label less, greater, done;
      // The high words decide the result unless they are equal; they carry
      // the sign, so this compare is signed.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect
      // the status flags.
      __ LoadImmediate(output, 0);
      // High words equal: the low words decide, compared as unsigned.
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      __ b(&done, EQ);
      __ b(&less, CC);  // CC == unsigned lower.

      __ Bind(&greater);
      __ LoadImmediate(output, 1);
      __ b(&done);

      __ Bind(&less);
      __ LoadImmediate(output, -1);

      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }
}
1762
1763void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1764  LocationSummary* locations =
1765      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1766  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1767    locations->SetInAt(i, Location::Any());
1768  }
1769  locations->SetOut(Location::Any());
1770}
1771
1772void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
1773  UNUSED(instruction);
1774  LOG(FATAL) << "Unreachable";
1775}
1776
1777void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1778  LocationSummary* locations =
1779      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1780  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
1781  locations->SetInAt(0, Location::RequiresRegister());
1782  locations->SetInAt(1, Location::RequiresRegister());
1783  // Temporary registers for the write barrier.
1784  if (is_object_type) {
1785    locations->AddTemp(Location::RequiresRegister());
1786    locations->AddTemp(Location::RequiresRegister());
1787  }
1788}
1789
// Emits the store of a value into an instance field. The store width follows
// the field type; reference stores additionally dirty the GC card of the
// holder object.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, obj, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, obj, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: mark the card for `obj` using the temps reserved
        // by the locations builder.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Location value = locations->InAt(1);
      // Only the low register of the pair is passed; kStoreWordPair stores
      // both halves.
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
1838
1839void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1840  LocationSummary* locations =
1841      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1842  locations->SetInAt(0, Location::RequiresRegister());
1843  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1844}
1845
// Emits the load of an instance field into the output register(s), choosing
// the load width and sign/zero extension from the field type.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Booleans are zero-extended bytes.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are zero-extended halfwords.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadWord, out, obj, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // Only the low register of the pair is named; kLoadWordPair fills both.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1899
1900void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
1901  LocationSummary* locations =
1902      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1903  locations->SetInAt(0, Location::RequiresRegister());
1904  if (instruction->HasUses()) {
1905    locations->SetOut(Location::SameAsFirstInput());
1906  }
1907}
1908
1909void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
1910  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
1911  codegen_->AddSlowPath(slow_path);
1912
1913  LocationSummary* locations = instruction->GetLocations();
1914  Location obj = locations->InAt(0);
1915
1916  if (obj.IsRegister()) {
1917    __ cmp(obj.As<Register>(), ShifterOperand(0));
1918    __ b(slow_path->GetEntryLabel(), EQ);
1919  } else {
1920    DCHECK(obj.IsConstant()) << obj;
1921    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
1922    __ b(slow_path->GetEntryLabel());
1923  }
1924}
1925
1926void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
1927  LocationSummary* locations =
1928      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1929  locations->SetInAt(0, Location::RequiresRegister());
1930  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1931  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1932}
1933
// Emits an array element load. For a constant index the element offset
// ((index << scale) + data_offset) is folded into the load; otherwise the
// scaled index is added to the array base in IP first.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no index scaling needed.
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // Halfword elements: scale the index by 2.
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded like plain 32-bit words (no read barrier here).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2029
2030void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2031  Primitive::Type value_type = instruction->GetComponentType();
2032  bool is_object = value_type == Primitive::kPrimNot;
2033  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2034      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
2035  if (is_object) {
2036    InvokeRuntimeCallingConvention calling_convention;
2037    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2038    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2039    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2040  } else {
2041    locations->SetInAt(0, Location::RequiresRegister());
2042    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2043    locations->SetInAt(2, Location::RequiresRegister());
2044  }
2045}
2046
2047void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2048  LocationSummary* locations = instruction->GetLocations();
2049  Register obj = locations->InAt(0).As<Register>();
2050  Location index = locations->InAt(1);
2051  Primitive::Type value_type = instruction->GetComponentType();
2052
2053  switch (value_type) {
2054    case Primitive::kPrimBoolean:
2055    case Primitive::kPrimByte: {
2056      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2057      Register value = locations->InAt(2).As<Register>();
2058      if (index.IsConstant()) {
2059        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2060        __ StoreToOffset(kStoreByte, value, obj, offset);
2061      } else {
2062        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2063        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2064      }
2065      break;
2066    }
2067
2068    case Primitive::kPrimShort:
2069    case Primitive::kPrimChar: {
2070      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2071      Register value = locations->InAt(2).As<Register>();
2072      if (index.IsConstant()) {
2073        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2074        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2075      } else {
2076        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2077        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2078      }
2079      break;
2080    }
2081
2082    case Primitive::kPrimInt: {
2083      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2084      Register value = locations->InAt(2).As<Register>();
2085      if (index.IsConstant()) {
2086        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2087        __ StoreToOffset(kStoreWord, value, obj, offset);
2088      } else {
2089        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2090        __ StoreToOffset(kStoreWord, value, IP, data_offset);
2091      }
2092      break;
2093    }
2094
2095    case Primitive::kPrimNot: {
2096      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2097      break;
2098    }
2099
2100    case Primitive::kPrimLong: {
2101      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2102      Location value = locations->InAt(2);
2103      if (index.IsConstant()) {
2104        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2105        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2106      } else {
2107        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2108        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2109      }
2110      break;
2111    }
2112
2113    case Primitive::kPrimFloat:
2114    case Primitive::kPrimDouble:
2115      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2116      UNREACHABLE();
2117    case Primitive::kPrimVoid:
2118      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2119      UNREACHABLE();
2120  }
2121}
2122
2123void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2124  LocationSummary* locations =
2125      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2126  locations->SetInAt(0, Location::RequiresRegister());
2127  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2128}
2129
2130void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2131  LocationSummary* locations = instruction->GetLocations();
2132  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2133  Register obj = locations->InAt(0).As<Register>();
2134  Register out = locations->Out().As<Register>();
2135  __ LoadFromOffset(kLoadWord, out, obj, offset);
2136}
2137
2138void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2139  LocationSummary* locations =
2140      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2141  locations->SetInAt(0, Location::RequiresRegister());
2142  locations->SetInAt(1, Location::RequiresRegister());
2143  if (instruction->HasUses()) {
2144    locations->SetOut(Location::SameAsFirstInput());
2145  }
2146}
2147
2148void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2149  LocationSummary* locations = instruction->GetLocations();
2150  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2151      instruction, locations->InAt(0), locations->InAt(1));
2152  codegen_->AddSlowPath(slow_path);
2153
2154  Register index = locations->InAt(0).As<Register>();
2155  Register length = locations->InAt(1).As<Register>();
2156
2157  __ cmp(index, ShifterOperand(length));
2158  __ b(slow_path->GetEntryLabel(), CS);
2159}
2160
// Dirties the card-table entry covering `object` after a reference store of
// `value` into it, so the GC can find the modified object. Skipped entirely
// when the stored value is null (no reference was created).
// `temp` and `card` are clobbered.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card <- card-table base (from the current thread); temp <- card index.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // NOTE(review): the byte stored is the low byte of `card` (the table base)
  // itself, which the runtime appears to treat as the dirty marker — verify
  // against CardTable.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
2169
2170void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
2171  temp->SetLocations(nullptr);
2172}
2173
2174void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
2175  // Nothing to do, this is driven by the code generator.
2176  UNUSED(temp);
2177}
2178
2179void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
2180  UNUSED(instruction);
2181  LOG(FATAL) << "Unreachable";
2182}
2183
2184void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
2185  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2186}
2187
2188void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2189  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2190}
2191
// Emits a suspend check, unless another instruction is already responsible
// for emitting it (a loop's back edge, or the entry block's trailing goto).
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
2205
// Tests the current thread's flags halfword and branches to a suspend slow
// path when any flag is set. With a non-null `successor` (back-edge case),
// the no-suspend fast path jumps directly to the successor block; otherwise
// execution falls through once the slow path returns. Clobbers IP.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
2224
// The resolver emits code through the owning code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
2228
// Emits one move of the parallel-move graph. Handles word-sized transfers
// between core registers, stack slots, and 32-bit integer constants; IP is
// the scratch register for memory-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.As<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
                        SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack: bounce through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    // Only 32-bit integer constants are expected here.
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.As<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
2264
// Swaps the contents of `reg` with the stack slot at SP + `mem`, using IP as
// the temporary.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
2270
// Swaps two stack slots, using IP plus one additional scratch core register.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // If the scratch register had to be spilled, SP moved down by one word, so
  // both slot offsets must be rebased.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
2281
// Emits one swap of the parallel-move graph. Only word-sized core-register
// and stack-slot operands are supported.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Three-mov swap through IP; IP itself must not be an operand.
    DCHECK_NE(source.As<Register>(), IP);
    DCHECK_NE(destination.As<Register>(), IP);
    __ Mov(IP, source.As<Register>());
    __ Mov(source.As<Register>(), destination.As<Register>());
    __ Mov(destination.As<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.As<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.As<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}
2303
// Saves a scratch register on the stack so the resolver may clobber it.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
2307
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
2311
2312void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2313  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2314      ? LocationSummary::kCallOnSlowPath
2315      : LocationSummary::kNoCall;
2316  LocationSummary* locations =
2317      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2318  locations->SetOut(Location::RequiresRegister());
2319}
2320
// Loads a class reference into the output register.
// - Referrer's class: read it straight out of the current ArtMethod.
// - Otherwise: read the dex-cache entry and fall back to a slow path when the
//   type is unresolved (null entry) or still needs class initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out <- current_method->dex_cache_resolved_types_[type_index].
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the type is not yet resolved.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2347
2348void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2349  LocationSummary* locations =
2350      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2351  locations->SetInAt(0, Location::RequiresRegister());
2352  if (check->HasUses()) {
2353    locations->SetOut(Location::SameAsFirstInput());
2354  }
2355}
2356
2357void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2358  // We assume the class is not null.
2359  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2360      check->GetLoadClass(), check, check->GetDexPc(), true);
2361  codegen_->AddSlowPath(slow_path);
2362  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2363}
2364
// Branches to `slow_path` unless the class in `class_reg` has reached the
// initialized status. Clobbers IP.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
2375
2376void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2377  LocationSummary* locations =
2378      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2379  locations->SetInAt(0, Location::RequiresRegister());
2380  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2381}
2382
2383void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2384  LocationSummary* locations = instruction->GetLocations();
2385  Register cls = locations->InAt(0).As<Register>();
2386  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2387
2388  switch (instruction->GetType()) {
2389    case Primitive::kPrimBoolean: {
2390      Register out = locations->Out().As<Register>();
2391      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2392      break;
2393    }
2394
2395    case Primitive::kPrimByte: {
2396      Register out = locations->Out().As<Register>();
2397      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2398      break;
2399    }
2400
2401    case Primitive::kPrimShort: {
2402      Register out = locations->Out().As<Register>();
2403      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2404      break;
2405    }
2406
2407    case Primitive::kPrimChar: {
2408      Register out = locations->Out().As<Register>();
2409      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2410      break;
2411    }
2412
2413    case Primitive::kPrimInt:
2414    case Primitive::kPrimNot: {
2415      Register out = locations->Out().As<Register>();
2416      __ LoadFromOffset(kLoadWord, out, cls, offset);
2417      break;
2418    }
2419
2420    case Primitive::kPrimLong: {
2421      // TODO: support volatile.
2422      Location out = locations->Out();
2423      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2424      break;
2425    }
2426
2427    case Primitive::kPrimFloat:
2428    case Primitive::kPrimDouble:
2429      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2430      UNREACHABLE();
2431    case Primitive::kPrimVoid:
2432      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2433      UNREACHABLE();
2434  }
2435}
2436
2437void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2438  LocationSummary* locations =
2439      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2440  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2441  locations->SetInAt(0, Location::RequiresRegister());
2442  locations->SetInAt(1, Location::RequiresRegister());
2443  // Temporary registers for the write barrier.
2444  if (is_object_type) {
2445    locations->AddTemp(Location::RequiresRegister());
2446    locations->AddTemp(Location::RequiresRegister());
2447  }
2448}
2449
2450void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2451  LocationSummary* locations = instruction->GetLocations();
2452  Register cls = locations->InAt(0).As<Register>();
2453  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2454  Primitive::Type field_type = instruction->GetFieldType();
2455
2456  switch (field_type) {
2457    case Primitive::kPrimBoolean:
2458    case Primitive::kPrimByte: {
2459      Register value = locations->InAt(1).As<Register>();
2460      __ StoreToOffset(kStoreByte, value, cls, offset);
2461      break;
2462    }
2463
2464    case Primitive::kPrimShort:
2465    case Primitive::kPrimChar: {
2466      Register value = locations->InAt(1).As<Register>();
2467      __ StoreToOffset(kStoreHalfword, value, cls, offset);
2468      break;
2469    }
2470
2471    case Primitive::kPrimInt:
2472    case Primitive::kPrimNot: {
2473      Register value = locations->InAt(1).As<Register>();
2474      __ StoreToOffset(kStoreWord, value, cls, offset);
2475      if (field_type == Primitive::kPrimNot) {
2476        Register temp = locations->GetTemp(0).As<Register>();
2477        Register card = locations->GetTemp(1).As<Register>();
2478        codegen_->MarkGCCard(temp, card, cls, value);
2479      }
2480      break;
2481    }
2482
2483    case Primitive::kPrimLong: {
2484      Location value = locations->InAt(1);
2485      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
2486      break;
2487    }
2488
2489    case Primitive::kPrimFloat:
2490    case Primitive::kPrimDouble:
2491      LOG(FATAL) << "Unimplemented register type " << field_type;
2492      UNREACHABLE();
2493    case Primitive::kPrimVoid:
2494      LOG(FATAL) << "Unreachable type " << field_type;
2495      UNREACHABLE();
2496  }
2497}
2498
2499void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2500  LocationSummary* locations =
2501      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2502  locations->SetOut(Location::RequiresRegister());
2503}
2504
2505void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
2506  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
2507  codegen_->AddSlowPath(slow_path);
2508
2509  Register out = load->GetLocations()->Out().As<Register>();
2510  codegen_->LoadCurrentMethod(out);
2511  __ LoadFromOffset(
2512      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
2513  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
2514  __ cmp(out, ShifterOperand(0));
2515  __ b(slow_path->GetEntryLabel(), EQ);
2516  __ Bind(slow_path->GetExitLabel());
2517}
2518
2519}  // namespace arm
2520}  // namespace art
2521