// code_generator_arm.cc revision 3adfd1b4fb20ac2b0217b5d2737bfe30ad90257a
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
// Selects between an explicit SP-vs-stack-end comparison and the implicit
// (probe-load) stack overflow check in GenerateFrameEntry().
static constexpr bool kExplicitStackOverflowCheck = false;

// Number of registers pushed in GenerateFrameEntry (see PushList there).
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// SP-relative offset at which R0 is stored on entry (the current method slot).
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// No FPU registers are used for runtime-call arguments on ARM.
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
// Calling convention for calls into runtime entry points: core arguments in
// R0-R2, no FPU argument registers.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
61
// Assembler shorthand for the slow paths below; expands through a local (or
// parameter) named `codegen`, so that name must be in scope at each use.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Byte offset of the given Quick entry point within the Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
// Base class for ARM slow paths. Provides the labels used to branch into
// the out-of-line code and back to the main instruction stream.
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  // Label bound at the start of the slow path's code.
  Label* GetEntryLabel() { return &entry_label_; }
  // Label the slow path branches to when rejoining the fast path.
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
78
79class NullCheckSlowPathARM : public SlowPathCodeARM {
80 public:
81  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
82
83  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
84    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
85    __ Bind(GetEntryLabel());
86    arm_codegen->InvokeRuntime(
87        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
88  }
89
90 private:
91  HNullCheck* const instruction_;
92  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
93};
94
95class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
96 public:
97  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
98
99  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
101    __ Bind(GetEntryLabel());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
104  }
105
106 private:
107  HDivZeroCheck* const instruction_;
108  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
109};
110
// Slow path used by the explicit stack overflow check: jumps straight into
// the runtime's pThrowStackOverflow handler and never returns here.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Loading the entry point address directly into PC is a tail jump.
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};
124
// Slow path for HSuspendCheck: calls pTestSuspend so the thread can honor a
// pending suspension request, then resumes either at `successor_` or back at
// the point that triggered the check.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Preserve values that are live across the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      // No explicit successor: return to the instruction after the check.
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when there is no explicit successor block.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
159
// Slow path for HBoundsCheck: moves the offending index and the array length
// into the runtime argument registers and throws ArrayIndexOutOfBounds.
// The runtime call never returns.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    // pThrowArrayBounds takes the index in arg 0 and the length in arg 1.
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    arm_codegen->Move32(
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
188
// Slow path that resolves a type (and optionally runs its class initializer)
// by calling into the runtime, then moves the resulting class into the
// location expected by the fast path.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // The entry points take the type index in arg 0 and the current method
    // in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not have been clobbered by RestoreLiveRegisters below.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
240
// Slow path that resolves a string through the runtime and moves the result
// (returned in R0) into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must survive RestoreLiveRegisters below.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // pResolveString takes the current method in arg 0 and the string index
    // in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
269
// Slow path for HTypeCheck: calls pInstanceofNonTrivial with the checked
// type and the object's class, then moves the boolean result (in R0) to the
// output location.
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit TypeCheckSlowPathARM(HTypeCheck* instruction, Location object_class)
      : instruction_(instruction),
        object_class_(object_class) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must survive RestoreLiveRegisters below.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    MoveOperands move1(locations->InAt(1),
                       Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                       nullptr);
    MoveOperands move2(object_class_,
                       Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                       nullptr);
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(&move1);
    parallel_move.AddMove(&move2);
    arm_codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HTypeCheck* const instruction_;
  const Location object_class_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
312
// Redefine the assembler shorthand: from here on `__` goes through the
// enclosing method's GetAssembler() instead of the `codegen` local used by
// the slow paths above. (The duplicate `#undef __` was redundant.)
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
317
318inline Condition ARMCondition(IfCondition cond) {
319  switch (cond) {
320    case kCondEQ: return EQ;
321    case kCondNE: return NE;
322    case kCondLT: return LT;
323    case kCondLE: return LE;
324    case kCondGT: return GT;
325    case kCondGE: return GE;
326    default:
327      LOG(FATAL) << "Unknown if condition";
328  }
329  return EQ;        // Unreachable.
330}
331
332inline Condition ARMOppositeCondition(IfCondition cond) {
333  switch (cond) {
334    case kCondEQ: return NE;
335    case kCondNE: return EQ;
336    case kCondLT: return GE;
337    case kCondLE: return GT;
338    case kCondGT: return LE;
339    case kCondGE: return LT;
340    default:
341      LOG(FATAL) << "Unknown if condition";
342  }
343  return EQ;        // Unreachable.
344}
345
// Pretty-prints core register `reg` for debug output.
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
349
// Pretty-prints floating-point (S) register `reg` for debug output.
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
353
// Spills core register `reg_id` to the stack slot at `stack_index` and
// returns the number of bytes used (one word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
358
// Reloads core register `reg_id` from the stack slot at `stack_index` and
// returns the number of bytes used (one word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
363
// Constructs the ARM code generator for `graph`, wiring up the location
// builder, instruction visitor and parallel-move resolver.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true) {}  // NOTE(review): bare bool ctor arg — confirm its meaning in the assembler's declaration.
371
// Bytes occupied by the registers pushed in GenerateFrameEntry (LR, R6, R7).
size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}
375
// Picks a free register (or register pair) for a value of `type` and keeps
// the core-register and register-pair blocked sets consistent with each
// other. NOTE(review): FindFreeEntry presumably marks the chosen entry as
// blocked — confirm in its definition.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block both halves, then refresh the pair-level blocked set.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting at an even index,
      // so they alias a whole D register.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
426
427void CodeGeneratorARM::SetupBlockedRegisters() const {
428  // Don't allocate the dalvik style register pair passing.
429  blocked_register_pairs_[R1_R2] = true;
430
431  // Stack register, LR and PC are always reserved.
432  blocked_core_registers_[SP] = true;
433  blocked_core_registers_[LR] = true;
434  blocked_core_registers_[PC] = true;
435
436  // Reserve thread register.
437  blocked_core_registers_[TR] = true;
438
439  // Reserve temp register.
440  blocked_core_registers_[IP] = true;
441
442  // TODO: We currently don't use Quick's callee saved registers.
443  // We always save and restore R6 and R7 to make sure we can use three
444  // register pairs for long operations.
445  blocked_core_registers_[R4] = true;
446  blocked_core_registers_[R5] = true;
447  blocked_core_registers_[R8] = true;
448  blocked_core_registers_[R10] = true;
449  blocked_core_registers_[R11] = true;
450
451  blocked_fpu_registers_[S16] = true;
452  blocked_fpu_registers_[S17] = true;
453  blocked_fpu_registers_[S18] = true;
454  blocked_fpu_registers_[S19] = true;
455  blocked_fpu_registers_[S20] = true;
456  blocked_fpu_registers_[S21] = true;
457  blocked_fpu_registers_[S22] = true;
458  blocked_fpu_registers_[S23] = true;
459  blocked_fpu_registers_[S24] = true;
460  blocked_fpu_registers_[S25] = true;
461  blocked_fpu_registers_[S26] = true;
462  blocked_fpu_registers_[S27] = true;
463  blocked_fpu_registers_[S28] = true;
464  blocked_fpu_registers_[S29] = true;
465  blocked_fpu_registers_[S30] = true;
466  blocked_fpu_registers_[S31] = true;
467
468  UpdateBlockedPairRegisters();
469}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Constructs the instruction visitor that emits code through `codegen`'s
// assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
// Emits the method prologue: optional stack overflow check, callee-save
// push, frame allocation, and storing R0 at the bottom of the frame.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with a small enough frame can skip the overflow check.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      // Explicit check: compare SP against the thread's stack end and branch
      // to a slow path that throws on overflow.
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load from an address below SP by the reserved
      // amount. NOTE(review): presumably a fault here is converted to a
      // StackOverflowError by the runtime's fault handler — confirm.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      // Record the probe's PC so the fault can be attributed to this method.
      RecordPcInfo(nullptr, 0);
    }
  }

  // Push the registers accounted for by kNumberOfPushedRegistersAtEntry.
  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  // Store R0 at offset 0 (kCurrentMethodStackOffset); presumably R0 holds
  // the current method on entry — confirm against the calling convention.
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
511
// Emits the method epilogue: deallocates the frame and returns by popping
// the saved return address directly into PC (along with R6/R7).
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}
516
// Binds `block`'s label at the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
520
521Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
522  switch (load->GetType()) {
523    case Primitive::kPrimLong:
524    case Primitive::kPrimDouble:
525      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
526      break;
527
528    case Primitive::kPrimInt:
529    case Primitive::kPrimNot:
530    case Primitive::kPrimFloat:
531      return Location::StackSlot(GetStackSlot(load->GetLocal()));
532
533    case Primitive::kPrimBoolean:
534    case Primitive::kPrimByte:
535    case Primitive::kPrimChar:
536    case Primitive::kPrimShort:
537    case Primitive::kPrimVoid:
538      LOG(FATAL) << "Unexpected type " << load->GetType();
539  }
540
541  LOG(FATAL) << "Unreachable";
542  return Location();
543}
544
// Computes the location of the next method parameter of `type` under the
// managed-code calling convention and advances the internal core/FPU/stack
// cursors accordingly.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit core values: next free core register, else a stack slot.
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs take two core registers, or are split between the last
      // register and the stack (QuickParameter), or go fully on the stack.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        return Location::QuickParameter(index, stack_index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When aligned, skip past any S registers already claimed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of S registers.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
611
// Returns where a value of `type` is returned to the caller: R0 (or the
// R0/R1 pair) for core types, S0 (or the S0/S1 pair) for FP types, and no
// location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
641
// Moves a 32-bit value between any two of: core register, S register, and
// stack slot. Stack-to-stack moves are staged through IP.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
    } else {
      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack: go through the scratch register IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
675
// Moves a 64-bit value between locations: register pairs, FPU register
// pairs, quick parameters (low half in a register, high half in the caller's
// frame), and double stack slots. Uses IP (and R0 in the quick-parameter
// paths) as scratch.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsQuickParameter()) {
      // Low half is in an argument register; high half sits in the caller's
      // frame — hence the GetFrameSize() adjustment on the offset.
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      InvokeDexCallingConvention calling_convention;
      __ Mov(destination.AsRegisterPairLow<Register>(),
             calling_convention.GetRegisterAt(register_index));
      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // The R1/R2 pair is loaded one word at a time — presumably because
        // LDRD requires an even-numbered first register; confirm.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
    uint16_t stack_index = destination.GetQuickParameterStackIndex();
    if (source.IsRegisterPair()) {
      __ Mov(calling_convention.GetRegisterAt(register_index),
             source.AsRegisterPairLow<Register>());
      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Low half to the argument register; high half staged through R0 to
      // its outgoing stack slot.
      __ LoadFromOffset(
          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1/R2 stored word by word (mirrors the load case above).
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      // Spill the register half, then copy the caller-frame half via R0.
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
             SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0,
             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: copy both words through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  }
}
764
// Materializes `instruction`'s value into `location` on behalf of
// `move_for`: constants are loaded as immediates, locals are copied from
// their stack slots, temporaries from their assigned location, and anything
// else from the instruction's own output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do if the value is already where it is wanted.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (instruction->IsIntConstant()) {
    int32_t value = instruction->AsIntConstant()->GetValue();
    if (location.IsRegister()) {
      __ LoadImmediate(location.As<Register>(), value);
    } else {
      DCHECK(location.IsStackSlot());
      // Stage the immediate through IP.
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
    } else {
      DCHECK(location.IsDoubleStackSlot());
      // Stage each 32-bit half through IP.
      __ LoadImmediate(IP, Low32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      __ LoadImmediate(IP, High32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
    }
  } else if (instruction->IsLoadLocal()) {
    // Copy the local out of its stack slot, 32 or 64 bits wide by type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    Move32(location, temp_location);
  } else {
    // General case: the value must come from the instruction's output, which
    // is only valid if the move immediately follows the instruction.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
839
// Calls the runtime entrypoint stored at `entry_point_offset` in the current
// Thread (TR), then records the native PC so the runtime can map the return
// address back to `dex_pc` during stack walks.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only implicit-check instructions are allowed to invoke the runtime from
  // a method still considered a leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || !IsLeafMethod());
}
852
// A goto needs no operands or outputs.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
856
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  // On a loop back edge carrying a suspend check, generate the check here;
  // the suspend-check code itself handles the branch to `successor`.
  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check just before the goto in the entry block is generated
  // inline, with no suspend-check successor.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Elide the branch when the successor is the next block in emission order.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
878
// An exit instruction needs no operands or outputs.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
882
883void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
884  UNUSED(exit);
885  if (kIsDebugBuild) {
886    __ Comment("Unreachable");
887    __ bkpt(0);
888  }
889}
890
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // Only a materialized condition (or a non-HCondition input) needs a
  // register; otherwise the comparison is emitted as part of the branch.
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
899
// Generates the conditional branch for an HIf. Three cases: a constant
// condition (branch or fall through statically), a materialized condition
// (compare its boolean output against 0), or a non-materialized condition
// (re-emit the comparison and branch on its condition code). Branches to the
// next block in emission order are elided.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(locations->InAt(0).As<Register>(),
               ShifterOperand(locations->InAt(1).As<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // A constant that fits in an ARM immediate can be compared directly;
        // otherwise load it into the scratch register IP first.
        if (ShifterOperand::CanHoldArm(value, &operand)) {
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
950
951
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // Only materialized conditions produce a boolean value in a register.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
961
// Materializes a condition into 0/1 in the output register. Non-materialized
// conditions are emitted by their HIf user instead (see VisitIf).
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;

  LocationSummary* locations = comp->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmp(locations->InAt(0).As<Register>(),
           ShifterOperand(locations->InAt(1).As<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    // A constant that fits in an ARM immediate can be compared directly;
    // otherwise load it into the scratch register IP first.
    if (ShifterOperand::CanHoldArm(value, &operand)) {
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
    }
  }
  // Thumb-2 IT block: conditionally select 1 (condition holds) or 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
987
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
991
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
995
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
999
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1003
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1007
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1011
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1015
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1019
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1023
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1027
// All comparison kinds share the generic HCondition handling.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1031
// All comparison kinds share the generic HCondition handling.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1035
// A local declaration needs no operands or outputs.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1039
// Locals generate no code; they are only expected in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1043
// Loading a local needs no operands or outputs; see CodeGeneratorARM::Move.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1047
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1052
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  // Input 1 is the value to store; force it into the local's stack slot so
  // the store is performed by the register allocator's move resolution.
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1076
// Nothing to emit; the store is realized by the input's stack-slot location.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1080
// Constants are not materialized eagerly; their location is the constant
// itself, resolved at each use site.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1086
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1091
// Constants are not materialized eagerly; their location is the constant
// itself, resolved at each use site.
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1097
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1102
// Constants are not materialized eagerly; their location is the constant
// itself, resolved at each use site.
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1108
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1113
// Constants are not materialized eagerly; their location is the constant
// itself, resolved at each use site.
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1119
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1124
// A void return needs no operands or outputs.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1128
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1133
// The returned value must be in the calling convention's return location
// (e.g. R0 / R0-R1 / S0) before the frame exit.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1139
// Tears down the frame and returns; the value is already in the return
// location per VisitReturn's location summary.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1144
// Static invokes use the common invoke location setup.
void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}
1148
// Loads the current ArtMethod* (stored at the bottom of the frame) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1152
// Generates a static call: resolves the callee through the current method's
// dex cache, then calls its quick-compiled entry point via LR.
void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
  // LR();
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1180
1181void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1182  LocationSummary* locations =
1183      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1184  locations->AddTemp(Location::RegisterLocation(R0));
1185
1186  InvokeDexCallingConventionVisitor calling_convention_visitor;
1187  for (size_t i = 0; i < invoke->InputCount(); i++) {
1188    HInstruction* input = invoke->InputAt(i);
1189    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1190  }
1191
1192  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1193}
1194
// Virtual invokes use the common invoke location setup.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1198
// Generates a virtual call: loads the receiver's class, fetches the target
// method from the class's embedded vtable, and calls its entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    // Loading through the receiver also serves as the implicit null check.
    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1223
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method index, passed in R12).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1229
// Generates an interface call: passes the dex method index as a hidden
// argument, loads the receiver's class, fetches the conflict-resolving entry
// from the embedded IMT, and calls its entry point.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).As<Register>(), invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
  }
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1259
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // Long negation reads the input's high half after writing the output's
      // low half, so the output must not alias the input.
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1282
// Generates arithmetic negation. Int uses RSB (0 - in); long uses an
// RSBS/SBC/SUB sequence because Thumb-2 lacks RSC; floats use VNEG.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1329
// Location setup for type conversions. Only int-to-long is supported so far;
// every other combination aborts with "not yet implemented" or "unexpected".
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-long conversion.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1371
// Code generation for type conversions. Mirrors the builder: only
// int-to-long (move + arithmetic-shift sign extension) is implemented.
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-long conversion.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1419
void LocationsBuilderARM::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // The two-instruction long add must not clobber inputs it still reads,
      // so the output overlaps only for long results.
      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1445
// Generates addition. Int uses ADD or AddConstant; long uses ADDS/ADC to
// propagate the carry across the register pair; floats use VADD.
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong:
      // Low halves first (sets carry), then high halves with carry-in.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1485
void LocationsBuilderARM::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // As with add, the two-instruction long subtract requires the output
      // to not alias inputs it still reads.
      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1509
1510void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1511  LocationSummary* locations = sub->GetLocations();
1512  Location out = locations->Out();
1513  Location first = locations->InAt(0);
1514  Location second = locations->InAt(1);
1515  switch (sub->GetResultType()) {
1516    case Primitive::kPrimInt: {
1517      if (second.IsRegister()) {
1518        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1519      } else {
1520        __ AddConstant(out.As<Register>(),
1521                       first.As<Register>(),
1522                       -second.GetConstant()->AsIntConstant()->GetValue());
1523      }
1524      break;
1525    }
1526
1527    case Primitive::kPrimLong: {
1528      __ subs(out.AsRegisterPairLow<Register>(),
1529              first.AsRegisterPairLow<Register>(),
1530              ShifterOperand(second.AsRegisterPairLow<Register>()));
1531      __ sbc(out.AsRegisterPairHigh<Register>(),
1532             first.AsRegisterPairHigh<Register>(),
1533             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1534      break;
1535    }
1536
1537    case Primitive::kPrimFloat: {
1538      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1539      break;
1540    }
1541
1542    case Primitive::kPrimDouble: {
1543      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1544               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1545               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1546      break;
1547    }
1548
1549
1550    default:
1551      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1552  }
1553}
1554
void LocationsBuilderARM::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:  {
      // Both operands must be in registers; no constant form is used.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1579
// Generates multiplication. Int uses MUL; long builds the 64-bit product from
// 32x32 partial products (MUL/MLA/UMULL/ADD, IP as scratch); floats use VMUL.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1637
1638void LocationsBuilderARM::VisitDiv(HDiv* div) {
1639  LocationSummary* locations =
1640      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1641  switch (div->GetResultType()) {
1642    case Primitive::kPrimInt: {
1643      locations->SetInAt(0, Location::RequiresRegister());
1644      locations->SetInAt(1, Location::RequiresRegister());
1645      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1646      break;
1647    }
1648    case Primitive::kPrimLong: {
1649      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1650      break;
1651    }
1652    case Primitive::kPrimFloat:
1653    case Primitive::kPrimDouble: {
1654      locations->SetInAt(0, Location::RequiresFpuRegister());
1655      locations->SetInAt(1, Location::RequiresFpuRegister());
1656      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1657      break;
1658    }
1659
1660    default:
1661      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1662  }
1663}
1664
1665void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1666  LocationSummary* locations = div->GetLocations();
1667  Location out = locations->Out();
1668  Location first = locations->InAt(0);
1669  Location second = locations->InAt(1);
1670
1671  switch (div->GetResultType()) {
1672    case Primitive::kPrimInt: {
1673      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1674      break;
1675    }
1676
1677    case Primitive::kPrimLong: {
1678      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1679      break;
1680    }
1681
1682    case Primitive::kPrimFloat: {
1683      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1684      break;
1685    }
1686
1687    case Primitive::kPrimDouble: {
1688      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1689               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1690               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1691      break;
1692    }
1693
1694    default:
1695      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1696  }
1697}
1698
1699void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1700  LocationSummary* locations =
1701      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1702  locations->SetInAt(0, Location::RequiresRegister());
1703  if (instruction->HasUses()) {
1704    locations->SetOut(Location::SameAsFirstInput());
1705  }
1706}
1707
1708void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1709  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1710  codegen_->AddSlowPath(slow_path);
1711
1712  LocationSummary* locations = instruction->GetLocations();
1713  Location value = locations->InAt(0);
1714
1715  DCHECK(value.IsRegister()) << value;
1716  __ cmp(value.As<Register>(), ShifterOperand(0));
1717  __ b(slow_path->GetEntryLabel(), EQ);
1718}
1719
1720void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1721  LocationSummary* locations =
1722      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1723  InvokeRuntimeCallingConvention calling_convention;
1724  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1725  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1726  locations->SetOut(Location::RegisterLocation(R0));
1727}
1728
1729void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
1730  InvokeRuntimeCallingConvention calling_convention;
1731  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1732  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1733  codegen_->InvokeRuntime(
1734      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1735}
1736
1737void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1738  LocationSummary* locations =
1739      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1740  InvokeRuntimeCallingConvention calling_convention;
1741  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1742  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1743  locations->SetOut(Location::RegisterLocation(R0));
1744  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1745}
1746
1747void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1748  InvokeRuntimeCallingConvention calling_convention;
1749  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1750  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1751  codegen_->InvokeRuntime(
1752      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1753}
1754
1755void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1756  LocationSummary* locations =
1757      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1758  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1759  if (location.IsStackSlot()) {
1760    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1761  } else if (location.IsDoubleStackSlot()) {
1762    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1763  }
1764  locations->SetOut(location);
1765}
1766
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  // The locations builder recorded where the calling convention put it.
  UNUSED(instruction);
}
1771
1772void LocationsBuilderARM::VisitNot(HNot* not_) {
1773  LocationSummary* locations =
1774      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1775  locations->SetInAt(0, Location::RequiresRegister());
1776  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1777}
1778
1779void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1780  LocationSummary* locations = not_->GetLocations();
1781  Location out = locations->Out();
1782  Location in = locations->InAt(0);
1783  switch (not_->InputAt(0)->GetType()) {
1784    case Primitive::kPrimBoolean:
1785      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1786      break;
1787
1788    case Primitive::kPrimInt:
1789      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1790      break;
1791
1792    case Primitive::kPrimLong:
1793      __ mvn(out.AsRegisterPairLow<Register>(),
1794             ShifterOperand(in.AsRegisterPairLow<Register>()));
1795      __ mvn(out.AsRegisterPairHigh<Register>(),
1796             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1797      break;
1798
1799    default:
1800      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1801  }
1802}
1803
1804void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1805  LocationSummary* locations =
1806      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1807  locations->SetInAt(0, Location::RequiresRegister());
1808  locations->SetInAt(1, Location::RequiresRegister());
1809  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1810}
1811
// Code generation for HCompare (only long compare is implemented). Produces
// -1, 0 or 1 in the output register.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      Register output = locations->Out().As<Register>();
      Location left = locations->InAt(0);
      Location right = locations->InAt(1);
      Label less, greater, done;
      // Compare high words first; they decide the result unless equal.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect
      // the status flags.
      __ LoadImmediate(output, 0);
      // High words equal: low words are compared as unsigned quantities.
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      __ b(&done, EQ);
      __ b(&less, CC);  // CC = unsigned lower.

      __ Bind(&greater);
      __ LoadImmediate(output, 1);
      __ b(&done);

      __ Bind(&less);
      __ LoadImmediate(output, -1);

      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }
}
1846
1847void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1848  LocationSummary* locations =
1849      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1850  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1851    locations->SetInAt(i, Location::Any());
1852  }
1853  locations->SetOut(Location::Any());
1854}
1855
// Phis generate no code (they are resolved into moves before code
// generation), so reaching this visitor indicates a compiler bug.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
1860
1861void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1862  LocationSummary* locations =
1863      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1864  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
1865  locations->SetInAt(0, Location::RequiresRegister());
1866  locations->SetInAt(1, Location::RequiresRegister());
1867  // Temporary registers for the write barrier.
1868  if (is_object_type) {
1869    locations->AddTemp(Location::RequiresRegister());
1870    locations->AddTemp(Location::RequiresRegister());
1871  }
1872}
1873
// Generates the store for an instance field write. The field type selects
// the store width; reference stores additionally mark the GC card.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, obj, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, obj, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: dirty the card covering `obj` so the GC will
        // rescan it (temps were reserved by the locations builder).
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Word-pair store of both halves of the long.
      // NOTE(review): no volatile handling here — presumably matches the
      // "TODO: support volatile" in the field-get path; confirm.
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister value = locations->InAt(1).As<SRegister>();
      __ StoreSToOffset(value, obj, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
      __ StoreDToOffset(value, obj, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
1930
1931void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1932  LocationSummary* locations =
1933      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1934  locations->SetInAt(0, Location::RequiresRegister());
1935  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1936}
1937
// Generates the load for an instance field read. The field type selects the
// load width and the sign/zero extension.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Booleans zero-extend.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimByte: {
      // Bytes sign-extend.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are unsigned 16-bit.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadWord, out, obj, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister out = locations->Out().As<SRegister>();
      __ LoadSFromOffset(out, obj, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
      __ LoadDFromOffset(out, obj, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1999
2000void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2001  LocationSummary* locations =
2002      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2003  locations->SetInAt(0, Location::RequiresRegister());
2004  if (instruction->HasUses()) {
2005    locations->SetOut(Location::SameAsFirstInput());
2006  }
2007}
2008
2009void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2010  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2011  codegen_->AddSlowPath(slow_path);
2012
2013  LocationSummary* locations = instruction->GetLocations();
2014  Location obj = locations->InAt(0);
2015
2016  if (obj.IsRegister()) {
2017    __ cmp(obj.As<Register>(), ShifterOperand(0));
2018    __ b(slow_path->GetEntryLabel(), EQ);
2019  } else {
2020    DCHECK(obj.IsConstant()) << obj;
2021    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2022    __ b(slow_path->GetEntryLabel());
2023  }
2024}
2025
2026void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2027  LocationSummary* locations =
2028      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2029  locations->SetInAt(0, Location::RequiresRegister());
2030  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2031  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2032}
2033
// Generates an array element load. The element type selects the load width
// and signedness. A constant index is folded into the offset; a register
// index is scaled with LSL and added to the base via the IP scratch register.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no scaling needed on the index.
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded like 32-bit words (compressed references are
      // the same size as int32_t, checked below).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2129
2130void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2131  Primitive::Type value_type = instruction->GetComponentType();
2132  bool is_object = value_type == Primitive::kPrimNot;
2133  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2134      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
2135  if (is_object) {
2136    InvokeRuntimeCallingConvention calling_convention;
2137    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2138    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2139    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2140  } else {
2141    locations->SetInAt(0, Location::RequiresRegister());
2142    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2143    locations->SetInAt(2, Location::RequiresRegister());
2144  }
2145}
2146
2147void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2148  LocationSummary* locations = instruction->GetLocations();
2149  Register obj = locations->InAt(0).As<Register>();
2150  Location index = locations->InAt(1);
2151  Primitive::Type value_type = instruction->GetComponentType();
2152
2153  switch (value_type) {
2154    case Primitive::kPrimBoolean:
2155    case Primitive::kPrimByte: {
2156      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2157      Register value = locations->InAt(2).As<Register>();
2158      if (index.IsConstant()) {
2159        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2160        __ StoreToOffset(kStoreByte, value, obj, offset);
2161      } else {
2162        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2163        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2164      }
2165      break;
2166    }
2167
2168    case Primitive::kPrimShort:
2169    case Primitive::kPrimChar: {
2170      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2171      Register value = locations->InAt(2).As<Register>();
2172      if (index.IsConstant()) {
2173        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2174        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2175      } else {
2176        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2177        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2178      }
2179      break;
2180    }
2181
2182    case Primitive::kPrimInt: {
2183      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2184      Register value = locations->InAt(2).As<Register>();
2185      if (index.IsConstant()) {
2186        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2187        __ StoreToOffset(kStoreWord, value, obj, offset);
2188      } else {
2189        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2190        __ StoreToOffset(kStoreWord, value, IP, data_offset);
2191      }
2192      break;
2193    }
2194
2195    case Primitive::kPrimNot: {
2196      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2197      break;
2198    }
2199
2200    case Primitive::kPrimLong: {
2201      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2202      Location value = locations->InAt(2);
2203      if (index.IsConstant()) {
2204        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2205        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2206      } else {
2207        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2208        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2209      }
2210      break;
2211    }
2212
2213    case Primitive::kPrimFloat:
2214    case Primitive::kPrimDouble:
2215      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2216      UNREACHABLE();
2217    case Primitive::kPrimVoid:
2218      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2219      UNREACHABLE();
2220  }
2221}
2222
2223void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2224  LocationSummary* locations =
2225      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2226  locations->SetInAt(0, Location::RequiresRegister());
2227  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2228}
2229
2230void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2231  LocationSummary* locations = instruction->GetLocations();
2232  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2233  Register obj = locations->InAt(0).As<Register>();
2234  Register out = locations->Out().As<Register>();
2235  __ LoadFromOffset(kLoadWord, out, obj, offset);
2236}
2237
2238void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2239  LocationSummary* locations =
2240      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2241  locations->SetInAt(0, Location::RequiresRegister());
2242  locations->SetInAt(1, Location::RequiresRegister());
2243  if (instruction->HasUses()) {
2244    locations->SetOut(Location::SameAsFirstInput());
2245  }
2246}
2247
2248void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2249  LocationSummary* locations = instruction->GetLocations();
2250  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2251      instruction, locations->InAt(0), locations->InAt(1));
2252  codegen_->AddSlowPath(slow_path);
2253
2254  Register index = locations->InAt(0).As<Register>();
2255  Register length = locations->InAt(1).As<Register>();
2256
2257  __ cmp(index, ShifterOperand(length));
2258  __ b(slow_path->GetEntryLabel(), CS);
2259}
2260
// Write-barrier helper: dirties the card-table entry covering `object` after
// `value` was stored into it, so the GC rescans the object. Skipped entirely
// when the stored value is null. `card` is clobbered with the card-table
// base, `temp` with the card index; the byte written is the low byte of the
// card base (presumably any non-clean marker works — confirm against the
// card-table definition).
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
2269
// Temporaries have no location summary; the code generator manages them.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
2273
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
2278
// Parallel moves are created after the locations builder has run (by the
// register allocator), so this visitor must never be reached.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2283
// Delegates emission of the whole move graph to the parallel move resolver.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
2287
// Suspend checks only call the runtime on the slow path. The summary is
// attached to the instruction by the LocationSummary constructor, so the
// returned pointer is intentionally unused.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
2291
2292void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2293  HBasicBlock* block = instruction->GetBlock();
2294  if (block->GetLoopInformation() != nullptr) {
2295    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2296    // The back edge will generate the suspend check.
2297    return;
2298  }
2299  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2300    // The goto will generate the suspend check.
2301    return;
2302  }
2303  GenerateSuspendCheck(instruction, nullptr);
2304}
2305
// Emits the actual suspend check: loads the thread flags halfword from the
// thread register (TR) and branches to the slow path when any flag is set.
// With a `successor` (back-edge case), the fast path branches straight to
// the successor; without one, execution resumes at the slow path's return
// label.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
2324
// The resolver emits code through the code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
2328
2329void ParallelMoveResolverARM::EmitMove(size_t index) {
2330  MoveOperands* move = moves_.Get(index);
2331  Location source = move->GetSource();
2332  Location destination = move->GetDestination();
2333
2334  if (source.IsRegister()) {
2335    if (destination.IsRegister()) {
2336      __ Mov(destination.As<Register>(), source.As<Register>());
2337    } else {
2338      DCHECK(destination.IsStackSlot());
2339      __ StoreToOffset(kStoreWord, source.As<Register>(),
2340                       SP, destination.GetStackIndex());
2341    }
2342  } else if (source.IsStackSlot()) {
2343    if (destination.IsRegister()) {
2344      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
2345                        SP, source.GetStackIndex());
2346    } else {
2347      DCHECK(destination.IsStackSlot());
2348      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2349      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2350    }
2351  } else {
2352    DCHECK(source.IsConstant());
2353    DCHECK(source.GetConstant()->IsIntConstant());
2354    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
2355    if (destination.IsRegister()) {
2356      __ LoadImmediate(destination.As<Register>(), value);
2357    } else {
2358      DCHECK(destination.IsStackSlot());
2359      __ LoadImmediate(IP, value);
2360      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2361    }
2362  }
2363}
2364
// Swaps the contents of `reg` with the stack slot at SP + `mem`, using IP
// as scratch.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
2370
// Swaps two stack slots. IP is one scratch register; the second comes from
// ScratchRegisterScope and may itself have been spilled to the stack, in
// which case both slot offsets must be rebased by one word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // If the scratch register was pushed, SP moved down by a word.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
2381
2382void ParallelMoveResolverARM::EmitSwap(size_t index) {
2383  MoveOperands* move = moves_.Get(index);
2384  Location source = move->GetSource();
2385  Location destination = move->GetDestination();
2386
2387  if (source.IsRegister() && destination.IsRegister()) {
2388    DCHECK_NE(source.As<Register>(), IP);
2389    DCHECK_NE(destination.As<Register>(), IP);
2390    __ Mov(IP, source.As<Register>());
2391    __ Mov(source.As<Register>(), destination.As<Register>());
2392    __ Mov(destination.As<Register>(), IP);
2393  } else if (source.IsRegister() && destination.IsStackSlot()) {
2394    Exchange(source.As<Register>(), destination.GetStackIndex());
2395  } else if (source.IsStackSlot() && destination.IsRegister()) {
2396    Exchange(destination.As<Register>(), source.GetStackIndex());
2397  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2398    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2399  } else {
2400    LOG(FATAL) << "Unimplemented";
2401  }
2402}
2403
// Spills a scratch core register to the stack so it can be reused.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
2407
// Restores a scratch core register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
2411
2412void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2413  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2414      ? LocationSummary::kCallOnSlowPath
2415      : LocationSummary::kNoCall;
2416  LocationSummary* locations =
2417      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2418  locations->SetOut(Location::RequiresRegister());
2419}
2420
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    // Fast path: the requested class is the current method's declaring
    // class, so it can be read directly off the ArtMethod.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // Look the class up in the current method's dex cache of resolved types.
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // If the cache entry is null the class is unresolved: branch to the slow
    // path which resolves (and possibly initializes) it at runtime.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2447
2448void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2449  LocationSummary* locations =
2450      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2451  locations->SetInAt(0, Location::RequiresRegister());
2452  if (check->HasUses()) {
2453    locations->SetOut(Location::SameAsFirstInput());
2454  }
2455}
2456
2457void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2458  // We assume the class is not null.
2459  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2460      check->GetLoadClass(), check, check->GetDexPc(), true);
2461  codegen_->AddSlowPath(slow_path);
2462  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2463}
2464
// Emits a check that the class in `class_reg` is initialized, branching to
// `slow_path` if it is not. Clobbers IP.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  // Status values below kStatusInitialized (signed compare) mean the class
  // is not yet initialized.
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
2475
2476void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2477  LocationSummary* locations =
2478      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2479  locations->SetInAt(0, Location::RequiresRegister());
2480  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2481}
2482
2483void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2484  LocationSummary* locations = instruction->GetLocations();
2485  Register cls = locations->InAt(0).As<Register>();
2486  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2487
2488  switch (instruction->GetType()) {
2489    case Primitive::kPrimBoolean: {
2490      Register out = locations->Out().As<Register>();
2491      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2492      break;
2493    }
2494
2495    case Primitive::kPrimByte: {
2496      Register out = locations->Out().As<Register>();
2497      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2498      break;
2499    }
2500
2501    case Primitive::kPrimShort: {
2502      Register out = locations->Out().As<Register>();
2503      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2504      break;
2505    }
2506
2507    case Primitive::kPrimChar: {
2508      Register out = locations->Out().As<Register>();
2509      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2510      break;
2511    }
2512
2513    case Primitive::kPrimInt:
2514    case Primitive::kPrimNot: {
2515      Register out = locations->Out().As<Register>();
2516      __ LoadFromOffset(kLoadWord, out, cls, offset);
2517      break;
2518    }
2519
2520    case Primitive::kPrimLong: {
2521      // TODO: support volatile.
2522      Location out = locations->Out();
2523      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2524      break;
2525    }
2526
2527    case Primitive::kPrimFloat: {
2528      SRegister out = locations->Out().As<SRegister>();
2529      __ LoadSFromOffset(out, cls, offset);
2530      break;
2531    }
2532
2533    case Primitive::kPrimDouble: {
2534      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2535      __ LoadDFromOffset(out, cls, offset);
2536      break;
2537    }
2538
2539    case Primitive::kPrimVoid:
2540      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2541      UNREACHABLE();
2542  }
2543}
2544
2545void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2546  LocationSummary* locations =
2547      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2548  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2549  locations->SetInAt(0, Location::RequiresRegister());
2550  locations->SetInAt(1, Location::RequiresRegister());
2551  // Temporary registers for the write barrier.
2552  if (is_object_type) {
2553    locations->AddTemp(Location::RequiresRegister());
2554    locations->AddTemp(Location::RequiresRegister());
2555  }
2556}
2557
// Emits a store to a static field, selecting the store instruction from the
// field type, and emits a GC write barrier for reference stores.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();  // Class holding the field.
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, cls, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, cls, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: mark the card of the holder object for the GC.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Location value = locations->InAt(1);
      // Stores both halves of the register pair.
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister value = locations->InAt(1).As<SRegister>();
      __ StoreSToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
      __ StoreDToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2614
2615void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2616  LocationSummary* locations =
2617      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2618  locations->SetOut(Location::RequiresRegister());
2619}
2620
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // Look the string up in the current method's dex cache of resolved strings.
  Register out = load->GetLocations()->Out().As<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(
      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // A null cache entry means the string is unresolved: take the slow path to
  // resolve it at runtime.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
2634
2635void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
2636  LocationSummary* locations =
2637      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2638  locations->SetOut(Location::RequiresRegister());
2639}
2640
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().As<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  // Read the pending exception from the thread-local slot (TR is the thread
  // register), then clear the slot.
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
2648
2649void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
2650  LocationSummary* locations =
2651      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2652  InvokeRuntimeCallingConvention calling_convention;
2653  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2654}
2655
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  // Delegates entirely to the pDeliverException runtime entry point.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
2660
2661void LocationsBuilderARM::VisitTypeCheck(HTypeCheck* instruction) {
2662  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
2663      ? LocationSummary::kNoCall
2664      : LocationSummary::kCallOnSlowPath;
2665  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2666  locations->SetInAt(0, Location::RequiresRegister());
2667  locations->SetInAt(1, Location::RequiresRegister());
2668  locations->SetOut(Location::RequiresRegister());
2669}
2670
// Emits an instanceof-style check: `out` is set to 1 if `obj`'s class
// matches `cls` (exactly for final classes, via a slow path otherwise),
// and to 0 if `obj` is null or the check fails.
void InstructionCodeGeneratorARM::VisitTypeCheck(HTypeCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Register cls = locations->InAt(1).As<Register>();
  Register out = locations->Out().As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, Location::RegisterLocation(out));
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Failure / null path: produce 0.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    // The slow path rejoins here after writing its own result into `out`.
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
2709
2710}  // namespace arm
2711}  // namespace art
2712