code_generator_arm.cc revision e21aa42e1341d34250742abafdd83311ad9fa737
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "art_method.h"
21#include "code_generator_utils.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "gc/accounting/card_table.h"
24#include "intrinsics.h"
25#include "intrinsics_arm.h"
26#include "mirror/array-inl.h"
27#include "mirror/class-inl.h"
28#include "thread.h"
29#include "utils/arm/assembler_arm.h"
30#include "utils/arm/managed_register_arm.h"
31#include "utils/assembler.h"
32#include "utils/stack_checks.h"
33
34namespace art {
35
36namespace arm {
37
static bool ExpectedPairLayout(Location location) {
  // We expect this layout for both core and fpu register pairs: the low
  // register must be even-numbered and the high register must be the one
  // immediately following it (e.g. R0/R1, S18/S19).
  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
}
42
// Stack offset at which the current ArtMethod* is stored in the frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the ArtMethod* on entry (managed calling convention).
static constexpr Register kMethodRegisterArgument = R0;

// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Core callee-saves. PC is listed to mimic Quick's frame layout; LR is
// pushed in its place on entry (see GenerateFrameEntry).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
// VFP callee-saved S registers (S16-S31).
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;

// Assembler shorthand used by the slow paths below, which emit through the
// `codegen` argument of EmitNativeCode.
#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->
// Byte offset of a quick runtime entrypoint within the Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
60
61class NullCheckSlowPathARM : public SlowPathCodeARM {
62 public:
63  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
64
65  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
66    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
67    __ Bind(GetEntryLabel());
68    arm_codegen->InvokeRuntime(
69        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
70  }
71
72 private:
73  HNullCheck* const instruction_;
74  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
75};
76
77class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
78 public:
79  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
80
81  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
82    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
83    __ Bind(GetEntryLabel());
84    arm_codegen->InvokeRuntime(
85        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
86  }
87
88 private:
89  HDivZeroCheck* const instruction_;
90  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
91};
92
93class SuspendCheckSlowPathARM : public SlowPathCodeARM {
94 public:
95  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
96      : instruction_(instruction), successor_(successor) {}
97
98  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
99    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
100    __ Bind(GetEntryLabel());
101    SaveLiveRegisters(codegen, instruction_->GetLocations());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
104    RestoreLiveRegisters(codegen, instruction_->GetLocations());
105    if (successor_ == nullptr) {
106      __ b(GetReturnLabel());
107    } else {
108      __ b(arm_codegen->GetLabelOf(successor_));
109    }
110  }
111
112  Label* GetReturnLabel() {
113    DCHECK(successor_ == nullptr);
114    return &return_label_;
115  }
116
117  HBasicBlock* GetSuccessor() const {
118    return successor_;
119  }
120
121 private:
122  HSuspendCheck* const instruction_;
123  // If not null, the block to branch to after the suspend check.
124  HBasicBlock* const successor_;
125
126  // If `successor_` is null, the label to branch to after the suspend check.
127  Label return_label_;
128
129  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
130};
131
132class BoundsCheckSlowPathARM : public SlowPathCodeARM {
133 public:
134  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
135                         Location index_location,
136                         Location length_location)
137      : instruction_(instruction),
138        index_location_(index_location),
139        length_location_(length_location) {}
140
141  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
142    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
143    __ Bind(GetEntryLabel());
144    // We're moving two locations to locations that could overlap, so we need a parallel
145    // move resolver.
146    InvokeRuntimeCallingConvention calling_convention;
147    codegen->EmitParallelMoves(
148        index_location_,
149        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
150        Primitive::kPrimInt,
151        length_location_,
152        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
153        Primitive::kPrimInt);
154    arm_codegen->InvokeRuntime(
155        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
156  }
157
158 private:
159  HBoundsCheck* const instruction_;
160  const Location index_location_;
161  const Location length_location_;
162
163  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
164};
165
166class LoadClassSlowPathARM : public SlowPathCodeARM {
167 public:
168  LoadClassSlowPathARM(HLoadClass* cls,
169                       HInstruction* at,
170                       uint32_t dex_pc,
171                       bool do_clinit)
172      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
173    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
174  }
175
176  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
177    LocationSummary* locations = at_->GetLocations();
178
179    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
180    __ Bind(GetEntryLabel());
181    SaveLiveRegisters(codegen, locations);
182
183    InvokeRuntimeCallingConvention calling_convention;
184    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
185    int32_t entry_point_offset = do_clinit_
186        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
187        : QUICK_ENTRY_POINT(pInitializeType);
188    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
189
190    // Move the class to the desired location.
191    Location out = locations->Out();
192    if (out.IsValid()) {
193      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
194      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
195    }
196    RestoreLiveRegisters(codegen, locations);
197    __ b(GetExitLabel());
198  }
199
200 private:
201  // The class this slow path will load.
202  HLoadClass* const cls_;
203
204  // The instruction where this slow path is happening.
205  // (Might be the load class or an initialization check).
206  HInstruction* const at_;
207
208  // The dex PC of `at_`.
209  const uint32_t dex_pc_;
210
211  // Whether to initialize the class.
212  const bool do_clinit_;
213
214  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
215};
216
217class LoadStringSlowPathARM : public SlowPathCodeARM {
218 public:
219  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
220
221  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
222    LocationSummary* locations = instruction_->GetLocations();
223    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
224
225    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
226    __ Bind(GetEntryLabel());
227    SaveLiveRegisters(codegen, locations);
228
229    InvokeRuntimeCallingConvention calling_convention;
230    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
231    arm_codegen->InvokeRuntime(
232        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
233    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
234
235    RestoreLiveRegisters(codegen, locations);
236    __ b(GetExitLabel());
237  }
238
239 private:
240  HLoadString* const instruction_;
241
242  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
243};
244
245class TypeCheckSlowPathARM : public SlowPathCodeARM {
246 public:
247  TypeCheckSlowPathARM(HInstruction* instruction,
248                       Location class_to_check,
249                       Location object_class,
250                       uint32_t dex_pc)
251      : instruction_(instruction),
252        class_to_check_(class_to_check),
253        object_class_(object_class),
254        dex_pc_(dex_pc) {}
255
256  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
257    LocationSummary* locations = instruction_->GetLocations();
258    DCHECK(instruction_->IsCheckCast()
259           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
260
261    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
262    __ Bind(GetEntryLabel());
263    SaveLiveRegisters(codegen, locations);
264
265    // We're moving two locations to locations that could overlap, so we need a parallel
266    // move resolver.
267    InvokeRuntimeCallingConvention calling_convention;
268    codegen->EmitParallelMoves(
269        class_to_check_,
270        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
271        Primitive::kPrimNot,
272        object_class_,
273        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
274        Primitive::kPrimNot);
275
276    if (instruction_->IsInstanceOf()) {
277      arm_codegen->InvokeRuntime(
278          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
279      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
280    } else {
281      DCHECK(instruction_->IsCheckCast());
282      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
283    }
284
285    RestoreLiveRegisters(codegen, locations);
286    __ b(GetExitLabel());
287  }
288
289 private:
290  HInstruction* const instruction_;
291  const Location class_to_check_;
292  const Location object_class_;
293  uint32_t dex_pc_;
294
295  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
296};
297
298class DeoptimizationSlowPathARM : public SlowPathCodeARM {
299 public:
300  explicit DeoptimizationSlowPathARM(HInstruction* instruction)
301    : instruction_(instruction) {}
302
303  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
304    __ Bind(GetEntryLabel());
305    SaveLiveRegisters(codegen, instruction_->GetLocations());
306    DCHECK(instruction_->IsDeoptimize());
307    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
308    uint32_t dex_pc = deoptimize->GetDexPc();
309    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
310    arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
311  }
312
313 private:
314  HInstruction* const instruction_;
315  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM);
316};
317
#undef __
// Redefine the shorthand to emit through this code generator's own assembler
// (member functions below use `GetAssembler()` rather than a `codegen` argument).
// Note: a duplicated `#undef __` was removed here; undefining twice was a no-op.
#define __ down_cast<ArmAssembler*>(GetAssembler())->
322
// Maps an HIR IfCondition to the equivalent ARM condition code.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
336
// Maps an HIR IfCondition to the ARM condition code of its logical negation.
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
350
351void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
352  stream << Register(reg);
353}
354
355void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
356  stream << SRegister(reg);
357}
358
// Spills core register `reg_id` to [SP + stack_index]; returns the number of
// bytes consumed (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
363
// Reloads core register `reg_id` from [SP + stack_index]; returns the number
// of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
368
// Spills S register `reg_id` to [SP + stack_index]; returns the number of
// bytes consumed (one ARM word — S registers are 32 bits).
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
373
// Reloads S register `reg_id` from [SP + stack_index]; returns the number of
// bytes consumed (one ARM word — S registers are 32 bits).
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
378
// Constructs the ARM code generator. The callee-save masks handed to the base
// class are computed from the kCoreCalleeSaves / kFpuCalleeSaves tables above.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(false /* can_relocate_branches */),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
400
// Picks and blocks a free register (or register pair) suitable for `type`,
// keeping the core-register and register-pair blocked sets consistent.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; block both halves individually too.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of consecutive S registers.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
451
452void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
453  // Don't allocate the dalvik style register pair passing.
454  blocked_register_pairs_[R1_R2] = true;
455
456  // Stack register, LR and PC are always reserved.
457  blocked_core_registers_[SP] = true;
458  blocked_core_registers_[LR] = true;
459  blocked_core_registers_[PC] = true;
460
461  // Reserve thread register.
462  blocked_core_registers_[TR] = true;
463
464  // Reserve temp register.
465  blocked_core_registers_[IP] = true;
466
467  if (is_baseline) {
468    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
469      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
470    }
471
472    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
473
474    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
475      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
476    }
477  }
478
479  UpdateBlockedPairRegisters();
480}
481
482void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
483  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
484    ArmManagedRegister current =
485        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
486    if (blocked_core_registers_[current.AsRegisterPairLow()]
487        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
488      blocked_register_pairs_[i] = true;
489    }
490  }
491}
492
// Visitor that emits ARM code for each HIR instruction, sharing the code
// generator's assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
497
// Computes core_spill_mask_ and fpu_spill_mask_ from the registers the
// allocator actually used, intersected with the callee-save masks.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    // Fill every bit between the lowest and highest set bits so the mask is
    // one contiguous run, as vpush/vpop require.
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
518
// Maps an ARM core register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}
522
// Maps an ARM S register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}
526
// Emits the method prologue: optional stack-overflow probe, callee-save
// spills (with matching CFI), frame allocation, and storing the ArtMethod*.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small frames don't need an explicit overflow probe.
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Probe the lowest address the frame will reach; the load faults if the
    // stack would overflow (implicit check — see the DCHECK above), and the
    // recorded PC info lets the runtime attribute the fault to this method.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  uint32_t push_mask = (core_spill_mask_ & (~(1 << PC))) | 1 << LR;
  __ PushList(push_mask);
  __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(push_mask));
  __ cfi().RelOffsetForMany(DWARFReg(kMethodRegisterArgument), 0, push_mask, kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is one contiguous run (see ComputeSpillMask), so a
    // single vpush starting at its lowest register covers it.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
    __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RelOffsetForMany(DWARFReg(S0), 0, fpu_spill_mask_, kArmWordSize);
  }
  // Allocate the rest of the frame and store the current method at SP + 0.
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, -adjust);
  __ cfi().AdjustCFAOffset(adjust);
  __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, 0);
}
560
// Emits the method epilogue, mirroring GenerateFrameEntry: deallocate the
// frame, restore callee-saves and return.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ bx(LR);
    return;
  }
  __ cfi().RememberState();
  // Deallocate the portion of the frame beyond the register spill area.
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, adjust);
  __ cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is one contiguous run (see ComputeSpillMask): one vpop.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
    __ cfi().AdjustCFAOffset(-kArmPointerSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RestoreMany(DWARFReg(SRegister(0)), fpu_spill_mask_);
  }
  // core_spill_mask_ includes PC (see kCoreCalleeSaves), so this pop returns.
  __ PopList(core_spill_mask_);
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
580
// Binds `block`'s label at the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
584
// Returns the stack location of the local read by `load`: 64-bit types take
// a double stack slot, 32-bit types a single one. Sub-word types are widened
// before this point and are therefore unexpected here.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
      UNREACHABLE();
  }

  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
608
// Returns the location (register, register pair, or stack slot) for the next
// parameter of the given type, advancing the gp/fpu/stack cursors. Every
// parameter also reserves stack slots, so register and stack assignment
// stay in sync.
Location InvokeDexCallingConventionVisitorARM::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs take two consecutive core registers forming a valid pair.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The two registers must be consecutive so they form a register pair.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When the float cursor is even it may point into a range already
      // consumed by doubles; catch it up past `double_index_`.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles take an even-aligned pair of S registers, starting after any
      // singles already handed out.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
683
684Location InvokeDexCallingConventionVisitorARM::GetReturnLocation(Primitive::Type type) const {
685  switch (type) {
686    case Primitive::kPrimBoolean:
687    case Primitive::kPrimByte:
688    case Primitive::kPrimChar:
689    case Primitive::kPrimShort:
690    case Primitive::kPrimInt:
691    case Primitive::kPrimNot: {
692      return Location::RegisterLocation(R0);
693    }
694
695    case Primitive::kPrimFloat: {
696      return Location::FpuRegisterLocation(S0);
697    }
698
699    case Primitive::kPrimLong: {
700      return Location::RegisterPairLocation(R0, R1);
701    }
702
703    case Primitive::kPrimDouble: {
704      return Location::FpuRegisterPairLocation(S0, S1);
705    }
706
707    case Primitive::kPrimVoid:
708      return Location();
709  }
710
711  UNREACHABLE();
712}
713
// The ArtMethod* is passed in R0 (kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorARM::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
717
// Emits a 32-bit move between any combination of core register, S register
// and stack slot. IP is used as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      // S register -> core register.
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      // Stack slot -> core register.
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      // Core register -> S register.
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      // Stack slot -> S register.
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack -> stack goes through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
751
// Emits a 64-bit move between register pairs, FPU register pairs and double
// stack slots. Overlapping halves are handled via the parallel move resolver.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The two halves may overlap between source and destination pairs, so
      // go through the parallel move resolver.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Primitive::kPrimInt,
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
          Primitive::kPrimInt);
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // kLoadWordPair fills both halves; the destination must be a canonical
      // even/odd pair (see ExpectedPairLayout).
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1/R2 is not a valid kStoreWordPair operand (pairs must start on an
        // even register — see ExpectedPairLayout), so store word by word.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: the slots could overlap, so use the resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Primitive::kPrimInt,
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)),
          Primitive::kPrimInt);
    }
  }
}
809
// Moves the value produced by `instruction` into `location` on behalf of
// `move_for` (baseline code generation). Handles the current-method slot,
// constant materialization, locals, temporaries, and plain outputs.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->IsCurrentMethod()) {
    // The current ArtMethod* lives in a fixed stack slot.
    Move32(location, Location::StackSlot(kCurrentMethodStackOffset));
  } else if (locations != nullptr && locations->Out().Equals(location)) {
    // Value is already where it is wanted.
    return;
  } else if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize a constant output directly into `location`.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Stage the immediate in the IP scratch register, then spill it.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store the 64-bit constant one 32-bit half at a time via IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Copy out of the dex local's stack slot, 32 or 64 bits depending on type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: take the instruction's own output. That output is only
    // valid immediately after the instruction (or its trailing temporaries).
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
893
// Calls a quick runtime entrypoint: loads the entrypoint address from the
// Thread register (TR) at `entry_point_offset`, branches to it through LR,
// and records the safepoint PC for `instruction` at `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc, slow_path);
  // Only explicit-check pseudo-instructions or instructions whose locations
  // declare CanCall() may reach the runtime from an otherwise-leaf method.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
908
// An unconditional branch needs no operand locations.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
912
913void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
914  HBasicBlock* successor = got->GetSuccessor();
915  DCHECK(!successor->IsExitBlock());
916
917  HBasicBlock* block = got->GetBlock();
918  HInstruction* previous = got->GetPrevious();
919
920  HLoopInformation* info = block->GetLoopInformation();
921  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
922    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
923    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
924    return;
925  }
926
927  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
928    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
929  }
930  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
931    __ b(codegen_->GetLabelOf(successor));
932  }
933}
934
// The exit block's pseudo-instruction needs no operand locations.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
938
// Nothing to emit: control never falls through the exit block.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
}
942
// Emits the compare-and-branch for `instruction`'s condition input.
// `always_true_target` is the label for a statically-true condition (null if
// the true successor is the fallthrough); `false_target` is null when the
// false successor is the fallthrough.
void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instruction,
                                                        Label* true_target,
                                                        Label* false_target,
                                                        Label* always_true_target) {
  HInstruction* cond = instruction->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (always_true_target != nullptr) {
        __ b(always_true_target);
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(instruction->GetLocations()->InAt(0).IsRegister());
      __ cmp(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(true_target, NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Prefer an immediate operand when the value is encodable; otherwise
        // materialize it in the IP scratch register first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(true_target, ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (false_target != nullptr) {
    __ b(false_target);
  }
}
994
995void LocationsBuilderARM::VisitIf(HIf* if_instr) {
996  LocationSummary* locations =
997      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
998  HInstruction* cond = if_instr->InputAt(0);
999  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
1000    locations->SetInAt(0, Location::RequiresRegister());
1001  }
1002}
1003
1004void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
1005  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1006  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1007  Label* always_true_target = true_target;
1008  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1009                                if_instr->IfTrueSuccessor())) {
1010    always_true_target = nullptr;
1011  }
1012  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1013                                if_instr->IfFalseSuccessor())) {
1014    false_target = nullptr;
1015  }
1016  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1017}
1018
1019void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
1020  LocationSummary* locations = new (GetGraph()->GetArena())
1021      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
1022  HInstruction* cond = deoptimize->InputAt(0);
1023  DCHECK(cond->IsCondition());
1024  if (cond->AsCondition()->NeedsMaterialization()) {
1025    locations->SetInAt(0, Location::RequiresRegister());
1026  }
1027}
1028
1029void InstructionCodeGeneratorARM::VisitDeoptimize(HDeoptimize* deoptimize) {
1030  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena())
1031      DeoptimizationSlowPathARM(deoptimize);
1032  codegen_->AddSlowPath(slow_path);
1033  Label* slow_path_entry = slow_path->GetEntryLabel();
1034  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1035}
1036
// Shared location setup for every HCondition flavor: left operand in a
// register, right operand register-or-constant, and an output register only
// when the boolean result must be materialized.
void LocationsBuilderARM::VisitCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(cond->InputAt(1)));
  if (cond->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
1046
// Materializes a condition as 0/1 in the output register. Non-materialized
// conditions are folded into their user (see GenerateTestAndBranch).
void InstructionCodeGeneratorARM::VisitCondition(HCondition* cond) {
  if (!cond->NeedsMaterialization()) return;
  LocationSummary* locations = cond->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Use an encodable immediate if possible; otherwise stage it in IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Thumb-2 IT block: predicated moves select 1 on the condition and 0 on
  // its opposite, without a branch.
  __ it(ARMCondition(cond->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(cond->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(cond->GetCondition()));
}
1072
// All comparison flavors share the generic HCondition location setup.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1076
// All comparison flavors share the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1080
// Delegates to the generic HCondition location setup.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1084
// Delegates to the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1088
// Delegates to the generic HCondition location setup.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1092
// Delegates to the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1096
// Delegates to the generic HCondition location setup.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1100
// Delegates to the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1104
// Delegates to the generic HCondition location setup.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1108
// Delegates to the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1112
// Delegates to the generic HCondition location setup.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1116
// Delegates to the generic HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1120
// Locals carry no operands; their storage is the stack slot.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1124
// No code: locals only exist in the entry block as stack-slot declarations.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1128
// The load itself is emitted by Move() at the use site; no locations needed.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1132
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1137
// Pins the stored value's input location to the local's stack slot, so the
// register allocator performs the store as an input move; slot width (single
// vs. double) follows the value's primitive type.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1161
// No code: the store happens via the input-location move set up above.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1165
1166void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1167  LocationSummary* locations =
1168      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1169  locations->SetOut(Location::ConstantLocation(constant));
1170}
1171
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1176
1177void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
1178  LocationSummary* locations =
1179      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1180  locations->SetOut(Location::ConstantLocation(constant));
1181}
1182
void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1187
1188void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1189  LocationSummary* locations =
1190      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1191  locations->SetOut(Location::ConstantLocation(constant));
1192}
1193
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1198
1199void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1200  LocationSummary* locations =
1201      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1202  locations->SetOut(Location::ConstantLocation(constant));
1203}
1204
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1209
1210void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1211  LocationSummary* locations =
1212      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1213  locations->SetOut(Location::ConstantLocation(constant));
1214}
1215
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1220
// Memory barriers take no operands.
void LocationsBuilderARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
1224
// Emits the fence matching the requested barrier kind.
void InstructionCodeGeneratorARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
1228
// A void return has no operands.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1232
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1237
// Pins the returned value to the ABI return location for its type, so no
// move is needed at the return itself.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1243
// The value is already in the return register(s); just exit the frame.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1248
// Builds locations for a static/direct invoke, preferring an intrinsic
// implementation when one is available.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // When we do not run baseline, explicit clinit checks triggered by static
  // invokes must have been pruned by art::PrepareForRegisterAllocation.
  DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    LocationSummary* locations = invoke->GetLocations();
    if (locations->CanCall()) {
      // An intrinsic with a slow path still needs the current method.
      locations->SetInAt(invoke->GetCurrentMethodInputIndex(), Location::RequiresRegister());
    }
    return;
  }

  HandleInvoke(invoke);
}
1266
1267static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1268  if (invoke->GetLocations()->Intrinsified()) {
1269    IntrinsicCodeGeneratorARM intrinsic(codegen);
1270    intrinsic.Dispatch(invoke);
1271    return true;
1272  }
1273  return false;
1274}
1275
// Emits a static/direct call, via an intrinsic when available, otherwise
// through the generic static-or-direct call sequence.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // When we do not run baseline, explicit clinit checks triggered by static
  // invokes must have been pruned by art::PrepareForRegisterAllocation.
  DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1290
// Shared location setup for all invoke kinds: arguments in the dex calling
// convention, output in the return location.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
1295
// Builds locations for a virtual invoke, preferring an intrinsic when one
// matches; otherwise falls back to the shared invoke setup.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1305
// Emits a vtable-based virtual call: class load (doubles as the implicit
// null check), vtable entry load, entrypoint load, then an indirect call.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArmPointerSize).Uint32Value();
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  DCHECK(receiver.IsRegister());
  __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1332
// Interface calls use the shared invoke setup plus R12 as a temp for the
// hidden argument (the interface method index).
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1338
// Emits an IMT-based interface call: hidden method index in the extra temp,
// class load (doubles as the implicit null check), IMT entry load,
// entrypoint load, then an indirect call.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
      invoke->GetImtIndex() % mirror::Class::kImtSize, kArmPointerSize).Uint32Value();
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1371
// Location setup for arithmetic negation, per result type. The long case
// needs an overlapping output: out.lo is written before in.hi is read.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1397
// Emits arithmetic negation. The long case computes 0 - in as a 64-bit
// subtraction with an RSBS/SBC/SUB sequence (Thumb-2 has no RSC).
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1444
1445void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1446  Primitive::Type result_type = conversion->GetResultType();
1447  Primitive::Type input_type = conversion->GetInputType();
1448  DCHECK_NE(result_type, input_type);
1449
1450  // The float-to-long, double-to-long and long-to-float type conversions
1451  // rely on a call to the runtime.
1452  LocationSummary::CallKind call_kind =
1453      (((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1454        && result_type == Primitive::kPrimLong)
1455       || (input_type == Primitive::kPrimLong && result_type == Primitive::kPrimFloat))
1456      ? LocationSummary::kCall
1457      : LocationSummary::kNoCall;
1458  LocationSummary* locations =
1459      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1460
1461  // The Java language does not allow treating boolean as an integral type but
1462  // our bit representation makes it safe.
1463
1464  switch (result_type) {
1465    case Primitive::kPrimByte:
1466      switch (input_type) {
1467        case Primitive::kPrimBoolean:
1468          // Boolean input is a result of code transformations.
1469        case Primitive::kPrimShort:
1470        case Primitive::kPrimInt:
1471        case Primitive::kPrimChar:
1472          // Processing a Dex `int-to-byte' instruction.
1473          locations->SetInAt(0, Location::RequiresRegister());
1474          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1475          break;
1476
1477        default:
1478          LOG(FATAL) << "Unexpected type conversion from " << input_type
1479                     << " to " << result_type;
1480      }
1481      break;
1482
1483    case Primitive::kPrimShort:
1484      switch (input_type) {
1485        case Primitive::kPrimBoolean:
1486          // Boolean input is a result of code transformations.
1487        case Primitive::kPrimByte:
1488        case Primitive::kPrimInt:
1489        case Primitive::kPrimChar:
1490          // Processing a Dex `int-to-short' instruction.
1491          locations->SetInAt(0, Location::RequiresRegister());
1492          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1493          break;
1494
1495        default:
1496          LOG(FATAL) << "Unexpected type conversion from " << input_type
1497                     << " to " << result_type;
1498      }
1499      break;
1500
1501    case Primitive::kPrimInt:
1502      switch (input_type) {
1503        case Primitive::kPrimLong:
1504          // Processing a Dex `long-to-int' instruction.
1505          locations->SetInAt(0, Location::Any());
1506          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1507          break;
1508
1509        case Primitive::kPrimFloat:
1510          // Processing a Dex `float-to-int' instruction.
1511          locations->SetInAt(0, Location::RequiresFpuRegister());
1512          locations->SetOut(Location::RequiresRegister());
1513          locations->AddTemp(Location::RequiresFpuRegister());
1514          break;
1515
1516        case Primitive::kPrimDouble:
1517          // Processing a Dex `double-to-int' instruction.
1518          locations->SetInAt(0, Location::RequiresFpuRegister());
1519          locations->SetOut(Location::RequiresRegister());
1520          locations->AddTemp(Location::RequiresFpuRegister());
1521          break;
1522
1523        default:
1524          LOG(FATAL) << "Unexpected type conversion from " << input_type
1525                     << " to " << result_type;
1526      }
1527      break;
1528
1529    case Primitive::kPrimLong:
1530      switch (input_type) {
1531        case Primitive::kPrimBoolean:
1532          // Boolean input is a result of code transformations.
1533        case Primitive::kPrimByte:
1534        case Primitive::kPrimShort:
1535        case Primitive::kPrimInt:
1536        case Primitive::kPrimChar:
1537          // Processing a Dex `int-to-long' instruction.
1538          locations->SetInAt(0, Location::RequiresRegister());
1539          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1540          break;
1541
1542        case Primitive::kPrimFloat: {
1543          // Processing a Dex `float-to-long' instruction.
1544          InvokeRuntimeCallingConvention calling_convention;
1545          locations->SetInAt(0, Location::FpuRegisterLocation(
1546              calling_convention.GetFpuRegisterAt(0)));
1547          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1548          break;
1549        }
1550
1551        case Primitive::kPrimDouble: {
1552          // Processing a Dex `double-to-long' instruction.
1553          InvokeRuntimeCallingConvention calling_convention;
1554          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1555              calling_convention.GetFpuRegisterAt(0),
1556              calling_convention.GetFpuRegisterAt(1)));
1557          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1558          break;
1559        }
1560
1561        default:
1562          LOG(FATAL) << "Unexpected type conversion from " << input_type
1563                     << " to " << result_type;
1564      }
1565      break;
1566
1567    case Primitive::kPrimChar:
1568      switch (input_type) {
1569        case Primitive::kPrimBoolean:
1570          // Boolean input is a result of code transformations.
1571        case Primitive::kPrimByte:
1572        case Primitive::kPrimShort:
1573        case Primitive::kPrimInt:
1574          // Processing a Dex `int-to-char' instruction.
1575          locations->SetInAt(0, Location::RequiresRegister());
1576          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1577          break;
1578
1579        default:
1580          LOG(FATAL) << "Unexpected type conversion from " << input_type
1581                     << " to " << result_type;
1582      }
1583      break;
1584
1585    case Primitive::kPrimFloat:
1586      switch (input_type) {
1587        case Primitive::kPrimBoolean:
1588          // Boolean input is a result of code transformations.
1589        case Primitive::kPrimByte:
1590        case Primitive::kPrimShort:
1591        case Primitive::kPrimInt:
1592        case Primitive::kPrimChar:
1593          // Processing a Dex `int-to-float' instruction.
1594          locations->SetInAt(0, Location::RequiresRegister());
1595          locations->SetOut(Location::RequiresFpuRegister());
1596          break;
1597
1598        case Primitive::kPrimLong: {
1599          // Processing a Dex `long-to-float' instruction.
1600          InvokeRuntimeCallingConvention calling_convention;
1601          locations->SetInAt(0, Location::RegisterPairLocation(
1602              calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
1603          locations->SetOut(Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
1604          break;
1605        }
1606
1607        case Primitive::kPrimDouble:
1608          // Processing a Dex `double-to-float' instruction.
1609          locations->SetInAt(0, Location::RequiresFpuRegister());
1610          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1611          break;
1612
1613        default:
1614          LOG(FATAL) << "Unexpected type conversion from " << input_type
1615                     << " to " << result_type;
1616      };
1617      break;
1618
1619    case Primitive::kPrimDouble:
1620      switch (input_type) {
1621        case Primitive::kPrimBoolean:
1622          // Boolean input is a result of code transformations.
1623        case Primitive::kPrimByte:
1624        case Primitive::kPrimShort:
1625        case Primitive::kPrimInt:
1626        case Primitive::kPrimChar:
1627          // Processing a Dex `int-to-double' instruction.
1628          locations->SetInAt(0, Location::RequiresRegister());
1629          locations->SetOut(Location::RequiresFpuRegister());
1630          break;
1631
1632        case Primitive::kPrimLong:
1633          // Processing a Dex `long-to-double' instruction.
1634          locations->SetInAt(0, Location::RequiresRegister());
1635          locations->SetOut(Location::RequiresFpuRegister());
1636          locations->AddTemp(Location::RequiresFpuRegister());
1637          locations->AddTemp(Location::RequiresFpuRegister());
1638          break;
1639
1640        case Primitive::kPrimFloat:
1641          // Processing a Dex `float-to-double' instruction.
1642          locations->SetInAt(0, Location::RequiresFpuRegister());
1643          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1644          break;
1645
1646        default:
1647          LOG(FATAL) << "Unexpected type conversion from " << input_type
1648                     << " to " << result_type;
1649      };
1650      break;
1651
1652    default:
1653      LOG(FATAL) << "Unexpected type conversion from " << input_type
1654                 << " to " << result_type;
1655  }
1656}
1657
// Emits code for an HTypeConversion, following the location constraints set
// up by the matching LocationsBuilderARM visitor. Conversions without direct
// hardware support here (float/double-to-long, long-to-float) are routed to
// quick runtime entry points instead of being emitted inline.
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // Identity conversions are expected to have been eliminated before codegen.
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Sign-extend the low 8 bits of the input into the output register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Sign-extend the low 16 bits of the input into the output register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // The result is simply the low 32 bits of the long input, which may
          // live in a register pair, a stack slot, or be a constant.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in a scratch S register so the input register is left
          // untouched, then move the integer result to a core register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // Same scheme as float-to-int, using a D-sized scratch register.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // No inline sequence; call the pF2l quick runtime entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          // No inline sequence; call the pD2l quick runtime entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Zero-extend the low 16 bits (char is unsigned).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          // Move the integer into the output S register, then convert it in
          // place.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // No inline sequence; call the pL2f quick runtime entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pL2f),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          // Move the integer into the low S register of the output pair, then
          // convert it in place to a double.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Computed as high * 2^32 + unsigned(low), entirely in double
          // arithmetic, using the two FPU temps reserved by the builder.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          SRegister constant_s = locations->GetTemp(1).AsFpuRegisterPairLow<SRegister>();
          DRegister constant_d = FromLowSToD(constant_s);

          // temp_d = int-to-double(high)
          __ vmovsr(temp_s, high);
          __ vcvtdi(temp_d, temp_s);
          // constant_d = k2Pow32EncodingForDouble
          __ LoadDImmediate(constant_d, bit_cast<double, int64_t>(k2Pow32EncodingForDouble));
          // out_d = unsigned-to-double(low)
          __ vmovsr(out_s, low);
          __ vcvtdu(out_d, out_s);
          // out_d += temp_d * constant_d
          __ vmlad(out_d, temp_d, constant_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1889
1890void LocationsBuilderARM::VisitAdd(HAdd* add) {
1891  LocationSummary* locations =
1892      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1893  switch (add->GetResultType()) {
1894    case Primitive::kPrimInt: {
1895      locations->SetInAt(0, Location::RequiresRegister());
1896      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1897      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1898      break;
1899    }
1900
1901    case Primitive::kPrimLong: {
1902      locations->SetInAt(0, Location::RequiresRegister());
1903      locations->SetInAt(1, Location::RequiresRegister());
1904      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1905      break;
1906    }
1907
1908    case Primitive::kPrimFloat:
1909    case Primitive::kPrimDouble: {
1910      locations->SetInAt(0, Location::RequiresFpuRegister());
1911      locations->SetInAt(1, Location::RequiresFpuRegister());
1912      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1913      break;
1914    }
1915
1916    default:
1917      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1918  }
1919}
1920
1921void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1922  LocationSummary* locations = add->GetLocations();
1923  Location out = locations->Out();
1924  Location first = locations->InAt(0);
1925  Location second = locations->InAt(1);
1926  switch (add->GetResultType()) {
1927    case Primitive::kPrimInt:
1928      if (second.IsRegister()) {
1929        __ add(out.AsRegister<Register>(),
1930               first.AsRegister<Register>(),
1931               ShifterOperand(second.AsRegister<Register>()));
1932      } else {
1933        __ AddConstant(out.AsRegister<Register>(),
1934                       first.AsRegister<Register>(),
1935                       second.GetConstant()->AsIntConstant()->GetValue());
1936      }
1937      break;
1938
1939    case Primitive::kPrimLong: {
1940      DCHECK(second.IsRegisterPair());
1941      __ adds(out.AsRegisterPairLow<Register>(),
1942              first.AsRegisterPairLow<Register>(),
1943              ShifterOperand(second.AsRegisterPairLow<Register>()));
1944      __ adc(out.AsRegisterPairHigh<Register>(),
1945             first.AsRegisterPairHigh<Register>(),
1946             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1947      break;
1948    }
1949
1950    case Primitive::kPrimFloat:
1951      __ vadds(out.AsFpuRegister<SRegister>(),
1952               first.AsFpuRegister<SRegister>(),
1953               second.AsFpuRegister<SRegister>());
1954      break;
1955
1956    case Primitive::kPrimDouble:
1957      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1958               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1959               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1960      break;
1961
1962    default:
1963      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1964  }
1965}
1966
1967void LocationsBuilderARM::VisitSub(HSub* sub) {
1968  LocationSummary* locations =
1969      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1970  switch (sub->GetResultType()) {
1971    case Primitive::kPrimInt: {
1972      locations->SetInAt(0, Location::RequiresRegister());
1973      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1974      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1975      break;
1976    }
1977
1978    case Primitive::kPrimLong: {
1979      locations->SetInAt(0, Location::RequiresRegister());
1980      locations->SetInAt(1, Location::RequiresRegister());
1981      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1982      break;
1983    }
1984    case Primitive::kPrimFloat:
1985    case Primitive::kPrimDouble: {
1986      locations->SetInAt(0, Location::RequiresFpuRegister());
1987      locations->SetInAt(1, Location::RequiresFpuRegister());
1988      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1989      break;
1990    }
1991    default:
1992      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1993  }
1994}
1995
1996void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1997  LocationSummary* locations = sub->GetLocations();
1998  Location out = locations->Out();
1999  Location first = locations->InAt(0);
2000  Location second = locations->InAt(1);
2001  switch (sub->GetResultType()) {
2002    case Primitive::kPrimInt: {
2003      if (second.IsRegister()) {
2004        __ sub(out.AsRegister<Register>(),
2005               first.AsRegister<Register>(),
2006               ShifterOperand(second.AsRegister<Register>()));
2007      } else {
2008        __ AddConstant(out.AsRegister<Register>(),
2009                       first.AsRegister<Register>(),
2010                       -second.GetConstant()->AsIntConstant()->GetValue());
2011      }
2012      break;
2013    }
2014
2015    case Primitive::kPrimLong: {
2016      DCHECK(second.IsRegisterPair());
2017      __ subs(out.AsRegisterPairLow<Register>(),
2018              first.AsRegisterPairLow<Register>(),
2019              ShifterOperand(second.AsRegisterPairLow<Register>()));
2020      __ sbc(out.AsRegisterPairHigh<Register>(),
2021             first.AsRegisterPairHigh<Register>(),
2022             ShifterOperand(second.AsRegisterPairHigh<Register>()));
2023      break;
2024    }
2025
2026    case Primitive::kPrimFloat: {
2027      __ vsubs(out.AsFpuRegister<SRegister>(),
2028               first.AsFpuRegister<SRegister>(),
2029               second.AsFpuRegister<SRegister>());
2030      break;
2031    }
2032
2033    case Primitive::kPrimDouble: {
2034      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2035               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2036               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2037      break;
2038    }
2039
2040
2041    default:
2042      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2043  }
2044}
2045
2046void LocationsBuilderARM::VisitMul(HMul* mul) {
2047  LocationSummary* locations =
2048      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2049  switch (mul->GetResultType()) {
2050    case Primitive::kPrimInt:
2051    case Primitive::kPrimLong:  {
2052      locations->SetInAt(0, Location::RequiresRegister());
2053      locations->SetInAt(1, Location::RequiresRegister());
2054      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2055      break;
2056    }
2057
2058    case Primitive::kPrimFloat:
2059    case Primitive::kPrimDouble: {
2060      locations->SetInAt(0, Location::RequiresFpuRegister());
2061      locations->SetInAt(1, Location::RequiresFpuRegister());
2062      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2063      break;
2064    }
2065
2066    default:
2067      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2068  }
2069}
2070
// Emits code for an HMul. 32-bit and floating-point multiplies are a single
// instruction; a 64-bit multiply is decomposed into 32-bit multiplies, with
// IP used as a scratch register.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // (umull also leaves the high 32 bits of the product in IP.)
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2132
2133void InstructionCodeGeneratorARM::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2134  DCHECK(instruction->IsDiv() || instruction->IsRem());
2135  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2136
2137  LocationSummary* locations = instruction->GetLocations();
2138  Location second = locations->InAt(1);
2139  DCHECK(second.IsConstant());
2140
2141  Register out = locations->Out().AsRegister<Register>();
2142  Register dividend = locations->InAt(0).AsRegister<Register>();
2143  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2144  DCHECK(imm == 1 || imm == -1);
2145
2146  if (instruction->IsRem()) {
2147    __ LoadImmediate(out, 0);
2148  } else {
2149    if (imm == 1) {
2150      __ Mov(out, dividend);
2151    } else {
2152      __ rsb(out, dividend, ShifterOperand(0));
2153    }
2154  }
2155}
2156
// Emits code for an integer division or remainder by a power-of-two constant
// (|divisor| >= 2), using shifts instead of a divide instruction.
void InstructionCodeGeneratorARM::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  uint32_t abs_imm = static_cast<uint32_t>(std::abs(imm));
  DCHECK(IsPowerOfTwo(abs_imm));
  int ctz_imm = CTZ(abs_imm);

  // Compute the rounding correction into temp:
  // temp = (dividend < 0) ? (abs_imm - 1) : 0, derived from the sign bit(s),
  // so that the arithmetic shift below rounds toward zero.
  if (ctz_imm == 1) {
    // abs_imm == 2: the sign bit itself is the correction (0 or 1).
    __ Lsr(temp, dividend, 32 - ctz_imm);
  } else {
    __ Asr(temp, dividend, 31);
    __ Lsr(temp, temp, 32 - ctz_imm);
  }
  // Biased value: dividend + correction.
  __ add(out, temp, ShifterOperand(dividend));

  if (instruction->IsDiv()) {
    __ Asr(out, out, ctz_imm);
    // A negative divisor negates the quotient.
    if (imm < 0) {
      __ rsb(out, out, ShifterOperand(0));
    }
  } else {
    // Remainder: keep the low ctz_imm bits of the biased value, then undo
    // the correction.
    __ ubfx(out, out, 0, ctz_imm);
    __ sub(out, out, ShifterOperand(temp));
  }
}
2191
// Emits code for an integer division or remainder by an arbitrary constant
// that is neither 0, +/-1, nor a power of two, using magic-number
// multiplication (magic and shift come from CalculateMagicAndShiftForDivRem).
void InstructionCodeGeneratorARM::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  int64_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  // temp2:temp1 = dividend * magic; only the high half (temp1) is used below.
  __ LoadImmediate(temp1, magic);
  __ smull(temp2, temp1, dividend, temp1);

  // Correct the high half when the signs of the divisor and magic differ.
  if (imm > 0 && magic < 0) {
    __ add(temp1, temp1, ShifterOperand(dividend));
  } else if (imm < 0 && magic > 0) {
    __ sub(temp1, temp1, ShifterOperand(dividend));
  }

  if (shift != 0) {
    __ Asr(temp1, temp1, shift);
  }

  if (instruction->IsDiv()) {
    // quotient = temp1 - (temp1 >> 31): adds one when temp1 is negative.
    __ sub(out, temp1, ShifterOperand(temp1, ASR, 31));
  } else {
    __ sub(temp1, temp1, ShifterOperand(temp1, ASR, 31));
    // TODO: Strength reduction for mls.
    // remainder = dividend - quotient * divisor.
    __ LoadImmediate(temp2, imm);
    __ mls(out, temp1, temp2, dividend);
  }
}
2232
2233void InstructionCodeGeneratorARM::GenerateDivRemConstantIntegral(HBinaryOperation* instruction) {
2234  DCHECK(instruction->IsDiv() || instruction->IsRem());
2235  DCHECK(instruction->GetResultType() == Primitive::kPrimInt);
2236
2237  LocationSummary* locations = instruction->GetLocations();
2238  Location second = locations->InAt(1);
2239  DCHECK(second.IsConstant());
2240
2241  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
2242  if (imm == 0) {
2243    // Do not generate anything. DivZeroCheck would prevent any code to be executed.
2244  } else if (imm == 1 || imm == -1) {
2245    DivRemOneOrMinusOne(instruction);
2246  } else if (IsPowerOfTwo(std::abs(imm))) {
2247    DivRemByPowerOfTwo(instruction);
2248  } else {
2249    DCHECK(imm <= -2 || imm >= 2);
2250    GenerateDivRemWithAnyConstant(instruction);
2251  }
2252}
2253
// Sets up locations for an HDiv. Integer division is emitted as a special
// constant sequence, an sdiv instruction, or a runtime call depending on the
// divisor and CPU features; long division always calls the runtime.
void LocationsBuilderARM::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  if (div->GetResultType() == Primitive::kPrimLong) {
    // pLdiv runtime call.
    call_kind = LocationSummary::kCall;
  } else if (div->GetResultType() == Primitive::kPrimInt && div->InputAt(1)->IsConstant()) {
    // sdiv will be replaced by other instruction sequence.
  } else if (div->GetResultType() == Primitive::kPrimInt &&
             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
    // pIdivmod runtime call.
    call_kind = LocationSummary::kCall;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (div->InputAt(1)->IsConstant()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
        int32_t abs_imm = std::abs(div->InputAt(1)->AsIntConstant()->GetValue());
        if (abs_imm <= 1) {
          // No temp register required.
        } else {
          // DivRemByPowerOfTwo needs one temp; GenerateDivRemWithAnyConstant
          // needs a second one.
          locations->AddTemp(Location::RequiresRegister());
          if (!IsPowerOfTwo(abs_imm)) {
            locations->AddTemp(Location::RequiresRegister());
          }
        }
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        locations->SetInAt(0, Location::RequiresRegister());
        locations->SetInAt(1, Location::RequiresRegister());
        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      } else {
        // Runtime call: pin operands and result to the calling convention.
        InvokeRuntimeCallingConvention calling_convention;
        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
        //       we only need the former.
        locations->SetOut(Location::RegisterLocation(R0));
      }
      break;
    }
    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(Location::RegisterPairLocation(R0, R1));
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2319
// Emits code for an HDiv, following the strategy chosen by
// LocationsBuilderARM::VisitDiv: constant sequences, sdiv, or runtime calls
// for integers; VFP divide instructions for float/double.
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsConstant()) {
        GenerateDivRemConstantIntegral(div);
      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        __ sdiv(out.AsRegister<Register>(),
                first.AsRegister<Register>(),
                second.AsRegister<Register>());
      } else {
        // No hardware divide: operands were pinned to the runtime calling
        // convention registers by the locations builder.
        InvokeRuntimeCallingConvention calling_convention;
        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
        DCHECK_EQ(R0, out.AsRegister<Register>());

        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), div, div->GetDexPc(), nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Long division always goes through the pLdiv runtime entry point.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2376
2377void LocationsBuilderARM::VisitRem(HRem* rem) {
2378  Primitive::Type type = rem->GetResultType();
2379
2380  // Most remainders are implemented in the runtime.
2381  LocationSummary::CallKind call_kind = LocationSummary::kCall;
2382  if (rem->GetResultType() == Primitive::kPrimInt && rem->InputAt(1)->IsConstant()) {
2383    // sdiv will be replaced by other instruction sequence.
2384    call_kind = LocationSummary::kNoCall;
2385  } else if ((rem->GetResultType() == Primitive::kPrimInt)
2386             && codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2387    // Have hardware divide instruction for int, do it with three instructions.
2388    call_kind = LocationSummary::kNoCall;
2389  }
2390
2391  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2392
2393  switch (type) {
2394    case Primitive::kPrimInt: {
2395      if (rem->InputAt(1)->IsConstant()) {
2396        locations->SetInAt(0, Location::RequiresRegister());
2397        locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
2398        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2399        int32_t abs_imm = std::abs(rem->InputAt(1)->AsIntConstant()->GetValue());
2400        if (abs_imm <= 1) {
2401          // No temp register required.
2402        } else {
2403          locations->AddTemp(Location::RequiresRegister());
2404          if (!IsPowerOfTwo(abs_imm)) {
2405            locations->AddTemp(Location::RequiresRegister());
2406          }
2407        }
2408      } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2409        locations->SetInAt(0, Location::RequiresRegister());
2410        locations->SetInAt(1, Location::RequiresRegister());
2411        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2412        locations->AddTemp(Location::RequiresRegister());
2413      } else {
2414        InvokeRuntimeCallingConvention calling_convention;
2415        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2416        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2417        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2418        //       we only need the latter.
2419        locations->SetOut(Location::RegisterLocation(R1));
2420      }
2421      break;
2422    }
2423    case Primitive::kPrimLong: {
2424      InvokeRuntimeCallingConvention calling_convention;
2425      locations->SetInAt(0, Location::RegisterPairLocation(
2426          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2427      locations->SetInAt(1, Location::RegisterPairLocation(
2428          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2429      // The runtime helper puts the output in R2,R3.
2430      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2431      break;
2432    }
2433    case Primitive::kPrimFloat: {
2434      InvokeRuntimeCallingConvention calling_convention;
2435      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2436      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2437      locations->SetOut(Location::FpuRegisterLocation(S0));
2438      break;
2439    }
2440
2441    case Primitive::kPrimDouble: {
2442      InvokeRuntimeCallingConvention calling_convention;
2443      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2444          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2445      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2446          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2447      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2448      break;
2449    }
2450
2451    default:
2452      LOG(FATAL) << "Unexpected rem type " << type;
2453  }
2454}
2455
// Emits code for HRem. Int remainders with a constant divisor or a hardware
// divide are generated inline; all other types call runtime entrypoints
// (their operands were pinned to calling-convention registers by
// LocationsBuilderARM::VisitRem, so no moves are needed here).
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
        if (second.IsConstant()) {
          GenerateDivRemConstantIntegral(rem);
        } else if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        Register reg1 = first.AsRegister<Register>();
        Register reg2 = second.AsRegister<Register>();
        Register temp = locations->GetTemp(0).AsRegister<Register>();

        // temp = reg1 / reg2  (integer division)
        // temp = temp * reg2
        // dest = reg1 - temp
        __ sdiv(temp, reg1, reg2);
        __ mul(temp, temp, reg2);
        __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      } else {
        // pIdivmod leaves the quotient in R0 and the remainder in R1;
        // only the remainder (R1) is consumed here.
        InvokeRuntimeCallingConvention calling_convention;
        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
        DCHECK_EQ(R1, out.AsRegister<Register>());

        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), rem, rem->GetDexPc(), nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
2508
// Allocates locations for the divide-by-zero check. The divisor may remain
// a constant (resolved at code-generation time) or live in a register.
void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    // The check passes its input through, so alias the output to it.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2517
2518void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2519  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2520  codegen_->AddSlowPath(slow_path);
2521
2522  LocationSummary* locations = instruction->GetLocations();
2523  Location value = locations->InAt(0);
2524
2525  switch (instruction->GetType()) {
2526    case Primitive::kPrimInt: {
2527      if (value.IsRegister()) {
2528        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2529        __ b(slow_path->GetEntryLabel(), EQ);
2530      } else {
2531        DCHECK(value.IsConstant()) << value;
2532        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2533          __ b(slow_path->GetEntryLabel());
2534        }
2535      }
2536      break;
2537    }
2538    case Primitive::kPrimLong: {
2539      if (value.IsRegisterPair()) {
2540        __ orrs(IP,
2541                value.AsRegisterPairLow<Register>(),
2542                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2543        __ b(slow_path->GetEntryLabel(), EQ);
2544      } else {
2545        DCHECK(value.IsConstant()) << value;
2546        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2547          __ b(slow_path->GetEntryLabel());
2548        }
2549      }
2550      break;
2551    default:
2552      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2553    }
2554  }
2555}
2556
// Allocates locations shared by shl/shr/ushr. Int shifts accept a constant
// or register count; long shifts need both operands in registers plus a
// temp for the cross-word carry sequence.
void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
      // No kNoOutputOverlap: the output pair is written while the input
      // pair is still live in the long-shift sequence.
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
2581
// Emits code shared by shl/shr/ushr for int and long operands. ARM shift
// instructions do not mask the count register the way Java requires, so the
// count is masked explicitly (31 for int, 63 for long).
// NOTE(review): the masking `and_` writes back into the shift-count input
// register in place — presumably the register allocator permits clobbering
// this input here; confirm before reusing this pattern.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift count: masked at compile time.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // 64-bit shift built from 32-bit ops: shift one half, OR in the bits
      // that cross the word boundary, then patch the result (via the PL
      // predicate) when the masked count is >= 32.
      Register o_h = out.AsRegisterPairHigh<Register>();
      Register o_l = out.AsRegisterPairLow<Register>();

      Register temp = locations->GetTemp(0).AsRegister<Register>();

      Register high = first.AsRegisterPairHigh<Register>();
      Register low = first.AsRegisterPairLow<Register>();

      Register second_reg = second.AsRegister<Register>();

      if (op->IsShl()) {
        // Shift the high part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsl(o_h, high, second_reg);
        // Shift the low part and `or` what overflew on the high part
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsr(temp, low, temp);
        __ orr(o_h, o_h, ShifterOperand(temp));
        // If the shift is > 32 bits, override the high part
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsl(o_h, low, temp, false, PL);
        // Shift the low part
        __ Lsl(o_l, low, second_reg);
      } else if (op->IsShr()) {
        // Shift the low part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(o_l, low, second_reg);
        // Shift the high part and `or` what underflew on the low part
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsl(temp, high, temp);
        __ orr(o_l, o_l, ShifterOperand(temp));
        // If the shift is > 32 bits, override the low part
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Asr(o_l, high, temp, false, PL);
        // Shift the high part
        __ Asr(o_h, high, second_reg);
      } else {
        // same as Shr except we use `Lsr`s and not `Asr`s
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(o_l, low, second_reg);
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsl(temp, high, temp);
        __ orr(o_l, o_l, ShifterOperand(temp));
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsr(o_l, high, temp, false, PL);
        __ Lsr(o_h, high, second_reg);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2678
// Shift-left locations: delegates to the shared shift handler.
void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2682
// Shift-left code generation: delegates to the shared shift handler.
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2686
// Arithmetic shift-right locations: delegates to the shared shift handler.
void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2690
// Arithmetic shift-right code generation: delegates to the shared shift handler.
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2694
// Logical (unsigned) shift-right locations: delegates to the shared shift handler.
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2698
// Logical (unsigned) shift-right code generation: delegates to the shared shift handler.
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2702
// Allocation always calls into the runtime: the type index goes in the
// first calling-convention register (reserved as a temp and loaded in the
// code generator), the method argument in the second, and the new object
// comes back in R0.
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}
2711
// Loads the type index into the first argument register and invokes the
// allocation entrypoint chosen at graph-building time.
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2720
// Array allocation calls into the runtime: type index in the first
// calling-convention register (temp, loaded by the code generator), the
// array length and method in the next two, result in R0.
void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
2730
// Loads the type index into the first argument register and invokes the
// array-allocation entrypoint chosen at graph-building time.
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2739
// Records where an incoming parameter lives. Stack-passed parameters are
// addressed relative to the caller's frame, so their slot index is rebased
// by this method's frame size.
void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
2751
// No code emitted: the parameter is already in the location recorded above.
void InstructionCodeGeneratorARM::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
2756
// The current ArtMethod* is always in the dedicated method register (R0).
void LocationsBuilderARM::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
2762
// No code emitted: the method pointer is already in its register.
void InstructionCodeGeneratorARM::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
2766
// Bitwise-not locations: input and output in registers, no overlap needed
// since mvn reads its source before writing the destination.
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2773
// Emits bitwise-not: one mvn for int, one mvn per word for long.
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2794
// Boolean-not locations: input and output registers, no overlap needed.
void LocationsBuilderARM::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2801
// Boolean negation as XOR with 1: flips 0 <-> 1 (input assumed to be 0 or 1).
void InstructionCodeGeneratorARM::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations = bool_not->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
}
2808
// Allocates locations for HCompare (long/float/double three-way compare).
// The result is always an int register holding -1, 0 or 1.
void LocationsBuilderARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2831
// Emits the three-way compare: `out` is preloaded with 0 (the "equal"
// result) before the flag-setting comparison, then the shared tail patches
// it to 1 (greater) or -1 (less) based on the condition flags.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words signed; only if equal fall through to the
      // unsigned low-word comparison below.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN operands: the gt/lt bias decides whether unordered counts as
      // greater or less.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: flags were set by the case above.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2880
// Phis place no constraints on their operands; the register allocator and
// parallel moves resolve them, so every location is Any.
void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
2889
// Phis generate no code; reaching here is a compiler bug.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2894
// Emits a DMB memory barrier. Store-store barriers use the cheaper ISHST
// option; every other kind uses a full inner-shareable ISH barrier.
void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
  // TODO (ported from quick): revisit Arm barrier kinds
  DmbOptions flavor = DmbOptions::ISH;  // quiet c++ warnings
  switch (kind) {
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kAnyAny: {
      flavor = DmbOptions::ISH;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      flavor = DmbOptions::ISHST;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ dmb(flavor);
}
2914
// Atomically loads a 64-bit value from [addr + offset] into out_lo/out_hi
// using ldrexd. A non-zero offset is folded into IP first; out_lo is safe
// to use as the scratch for the offset because ldrexd overwrites it anyway.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2926
// Atomically stores value_lo/value_hi to [addr + offset] with an
// ldrexd/strexd retry loop: strexd only succeeds if the exclusive monitor
// set by the preceding ldrexd is still held, so the loop repeats on failure.
// temp1 doubles as the offset scratch and the strexd status register;
// temp2 receives the discarded ldrexd value.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  NearLabel fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2949
// Allocates locations for instance/static field stores. Extra temps are
// needed either for the GC write barrier (reference stores) or for the
// ldrexd/strexd sequence used by wide volatile stores on cores without
// atomic ldrd/strd.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  Primitive::Type field_type = field_info.GetFieldType();
  if (Primitive::IsFloatingPointType(field_type)) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }

  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2990
// Emits an instance/static field store. Volatile stores are bracketed by
// AnyStore (before) and AnyAny (after) barriers; wide volatile stores on
// cores without atomic ldrd/strd use the ldrexd/strexd loop. Reference
// stores additionally mark the GC card for the written object.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info,
                                                 bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Copy the double into core registers (temps 0/1) so the
        // ldrexd/strexd loop can store it.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(
        temp, card, base, value.AsRegister<Register>(), value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
3090
// Allocates locations for instance/static field loads. Volatile doubles on
// cores without atomic ldrd/strd need two core temps for the ldrexd path;
// volatile longs need an overlapping output so the pair is not clobbered
// while the base is still live.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);

  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    locations->SetOut(Location::RequiresRegister(),
                      (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  }
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
3119
// Emits the load for an instance or static field read.  InAt(0) holds the
// base pointer (the object, or the declaring class for a static field); the
// load width is selected from the field's primitive type.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  // Without single-copy-atomic ldrd/strd, volatile 64-bit reads must use the
  // exclusive-load sequence emitted by GenerateWideAtomicLoad.
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        // ldrd loads both halves of the output register pair at once.
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into two core temps, record the faulting PC, then
        // move the pair into the destination D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  // For all other types the load just emitted is the potentially faulting
  // instruction, so record the PC for the implicit null check here.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Volatile read: fence so later accesses are not reordered above the load.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
3204
3205void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3206  HandleFieldSet(instruction, instruction->GetFieldInfo());
3207}
3208
3209void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3210  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3211}
3212
3213void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3214  HandleFieldGet(instruction, instruction->GetFieldInfo());
3215}
3216
3217void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3218  HandleFieldGet(instruction, instruction->GetFieldInfo());
3219}
3220
3221void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3222  HandleFieldGet(instruction, instruction->GetFieldInfo());
3223}
3224
3225void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3226  HandleFieldGet(instruction, instruction->GetFieldInfo());
3227}
3228
3229void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3230  HandleFieldSet(instruction, instruction->GetFieldInfo());
3231}
3232
3233void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3234  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3235}
3236
3237void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
3238  LocationSummary* locations =
3239      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3240  locations->SetInAt(0, Location::RequiresRegister());
3241  if (instruction->HasUses()) {
3242    locations->SetOut(Location::SameAsFirstInput());
3243  }
3244}
3245
3246void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
3247  if (codegen_->CanMoveNullCheckToUser(instruction)) {
3248    return;
3249  }
3250  Location obj = instruction->GetLocations()->InAt(0);
3251
3252  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
3253  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3254}
3255
3256void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
3257  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
3258  codegen_->AddSlowPath(slow_path);
3259
3260  LocationSummary* locations = instruction->GetLocations();
3261  Location obj = locations->InAt(0);
3262
3263  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
3264  __ b(slow_path->GetEntryLabel(), EQ);
3265}
3266
3267void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
3268  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
3269    GenerateImplicitNullCheck(instruction);
3270  } else {
3271    GenerateExplicitNullCheck(instruction);
3272  }
3273}
3274
3275void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
3276  LocationSummary* locations =
3277      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3278  locations->SetInAt(0, Location::RequiresRegister());
3279  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3280  if (Primitive::IsFloatingPointType(instruction->GetType())) {
3281    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3282  } else {
3283    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3284  }
3285}
3286
// Emits an array element load.  A constant index is folded into the
// addressing offset; a register index is scaled by the element size and
// added to the array base through the IP scratch register.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no scaling of the index is needed.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are 32-bit heap references, so they share the int path.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // ldrd loads both halves of the output register pair.
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The element load emitted above can serve as the implicit null check.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3413
3414void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3415  Primitive::Type value_type = instruction->GetComponentType();
3416
3417  bool needs_write_barrier =
3418      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3419  bool needs_runtime_call = instruction->NeedsTypeCheck();
3420
3421  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3422      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3423  if (needs_runtime_call) {
3424    InvokeRuntimeCallingConvention calling_convention;
3425    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3426    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3427    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3428  } else {
3429    locations->SetInAt(0, Location::RequiresRegister());
3430    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3431    if (Primitive::IsFloatingPointType(value_type)) {
3432      locations->SetInAt(2, Location::RequiresFpuRegister());
3433    } else {
3434      locations->SetInAt(2, Location::RequiresRegister());
3435    }
3436
3437    if (needs_write_barrier) {
3438      // Temporary registers for the write barrier.
3439      locations->AddTemp(Location::RequiresRegister());
3440      locations->AddTemp(Location::RequiresRegister());
3441    }
3442  }
3443}
3444
// Emits an array element store.  Reference stores that need a type check are
// routed to the pAputObject runtime entry point; all other stores are inline,
// with a GC card mark when a non-null reference may be written.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Byte-sized elements: no scaling of the index is needed.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the PC right after the store, which is the potentially
        // faulting instruction for the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value, instruction->GetValueCanBeNull());
        }
      } else {
        // Reference store needing a type check: let the runtime helper do the
        // check, the store and the card marking.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc(),
                                nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // strd stores both halves of the value register pair.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  // For the remaining types the store just emitted is the potentially
  // faulting instruction, so record the PC here.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3569
3570void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3571  LocationSummary* locations =
3572      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3573  locations->SetInAt(0, Location::RequiresRegister());
3574  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3575}
3576
3577void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3578  LocationSummary* locations = instruction->GetLocations();
3579  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3580  Register obj = locations->InAt(0).AsRegister<Register>();
3581  Register out = locations->Out().AsRegister<Register>();
3582  __ LoadFromOffset(kLoadWord, out, obj, offset);
3583  codegen_->MaybeRecordImplicitNullCheck(instruction);
3584}
3585
3586void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3587  LocationSummary* locations =
3588      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3589  locations->SetInAt(0, Location::RequiresRegister());
3590  locations->SetInAt(1, Location::RequiresRegister());
3591  if (instruction->HasUses()) {
3592    locations->SetOut(Location::SameAsFirstInput());
3593  }
3594}
3595
3596void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3597  LocationSummary* locations = instruction->GetLocations();
3598  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3599      instruction, locations->InAt(0), locations->InAt(1));
3600  codegen_->AddSlowPath(slow_path);
3601
3602  Register index = locations->InAt(0).AsRegister<Register>();
3603  Register length = locations->InAt(1).AsRegister<Register>();
3604
3605  __ cmp(index, ShifterOperand(length));
3606  __ b(slow_path->GetEntryLabel(), CS);
3607}
3608
3609void CodeGeneratorARM::MarkGCCard(Register temp,
3610                                  Register card,
3611                                  Register object,
3612                                  Register value,
3613                                  bool can_be_null) {
3614  NearLabel is_null;
3615  if (can_be_null) {
3616    __ CompareAndBranchIfZero(value, &is_null);
3617  }
3618  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
3619  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
3620  __ strb(card, Address(card, temp));
3621  if (can_be_null) {
3622    __ Bind(&is_null);
3623  }
3624}
3625
// Temporaries get no location summary of their own.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3629
// No code is emitted for an HTemporary.
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3634
// Parallel moves carry their operands' locations directly; reaching this
// location builder indicates a pipeline bug, hence the fatal log.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3639
// Lower the parallel move into a concrete sequence of moves/swaps via the
// move resolver.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3643
// A suspend check has no inputs or outputs; it may only call its slow path.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3647
3648void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3649  HBasicBlock* block = instruction->GetBlock();
3650  if (block->GetLoopInformation() != nullptr) {
3651    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3652    // The back edge will generate the suspend check.
3653    return;
3654  }
3655  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3656    // The goto will generate the suspend check.
3657    return;
3658  }
3659  GenerateSuspendCheck(instruction, nullptr);
3660}
3661
// Emits the suspend-check test.  With a null `successor` the code falls
// through past the slow-path return label; otherwise (back-edge form) it
// branches to `successor` when no suspension is requested.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      down_cast<SuspendCheckSlowPathARM*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First emission for this check: create and register the slow path.
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // The slow path is shared between emissions; its successor must agree.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Load the thread-flags halfword from the current thread (TR); a non-zero
  // value means a suspension request is pending.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3690
// The move resolver emits its code through the code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3694
// Emits one move of the parallel-move sequence.  All combinations of core /
// FPU register, (double) stack slot and constant locations are handled; IP
// and DTMP serve as core and FPU scratch registers for memory-to-memory
// transfers.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack: bounce through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      // 64-bit stack-to-stack: bounce through the DTMP scratch D register.
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      // ldrd requires an even-numbered, consecutive register pair.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      // 64-bit constant: materialize each 32-bit half separately.
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Store the raw bit pattern of the double, one half at a time.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        // Store the raw bit pattern of the float through IP.
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3809
// Swaps the contents of core register `reg` with the stack slot at SP+mem,
// using IP as the scratch register.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3815
// Swaps two stack slots.  IP is one scratch register; a second core scratch
// is obtained via ScratchRegisterScope, which may spill a register.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // If the scratch register had to be spilled, the slot offsets shift by one
  // word — presumably because the spill pushed onto the stack; TODO confirm.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3826
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  // Emit code that swaps the contents of the source and destination of the
  // move at `index`. Uses IP and/or DTMP as scratch registers.
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core register <-> core register, via IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    // Core register <-> stack slot.
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    // Stack slot <-> core register (same helper, operands flipped).
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    // Stack slot <-> stack slot.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Single-precision FP register <-> FP register, via IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core register pair <-> core register pair; the 64-bit source is parked
    // in DTMP while the two halves are moved.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core register pair <-> double stack slot. Relies on the pair being
    // contiguous (checked below) so a single ldrd/strd-style access works.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // Double-precision FP register <-> FP register, via DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // Double-precision FP register <-> double stack slot, via DTMP.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // Single-precision FP register <-> stack slot, via IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // Double stack slot <-> double stack slot: swap each 32-bit half.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3899
void ParallelMoveResolverARM::SpillScratch(int reg) {
  // Save a scratch core register on the stack so it can be reused by the
  // move resolver; restored by RestoreScratch().
  __ Push(static_cast<Register>(reg));
}
3903
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  // Restore a scratch core register previously saved by SpillScratch().
  __ Pop(static_cast<Register>(reg));
}
3907
3908void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3909  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3910      ? LocationSummary::kCallOnSlowPath
3911      : LocationSummary::kNoCall;
3912  LocationSummary* locations =
3913      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3914  locations->SetInAt(0, Location::RequiresRegister());
3915  locations->SetOut(Location::RequiresRegister());
3916}
3917
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Register current_method = locations->InAt(0).AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // The requested class declares the current method: load it directly from
    // the ArtMethod*; no resolution or clinit check can be required.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    __ LoadFromOffset(
        kLoadWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current_method->dex_cache_resolved_types_[type_index]
    __ LoadFromOffset(kLoadWord,
                      out,
                      current_method,
                      ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // Go to the slow path if the cache entry is null (class not resolved yet).
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3947
3948void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3949  LocationSummary* locations =
3950      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3951  locations->SetInAt(0, Location::RequiresRegister());
3952  if (check->HasUses()) {
3953    locations->SetOut(Location::SameAsFirstInput());
3954  }
3955}
3956
3957void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3958  // We assume the class is not null.
3959  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3960      check->GetLoadClass(), check, check->GetDexPc(), true);
3961  codegen_->AddSlowPath(slow_path);
3962  GenerateClassInitializationCheck(slow_path,
3963                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3964}
3965
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  // Branch to `slow_path` if the class status is below kStatusInitialized.
  // Clobbers IP. Binds the slow path's exit label.
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3976
3977void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3978  LocationSummary* locations =
3979      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3980  locations->SetInAt(0, Location::RequiresRegister());
3981  locations->SetOut(Location::RequiresRegister());
3982}
3983
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  // Load the string from the declaring class' dex cache; take the slow path
  // if the cache entry is still null (string not resolved yet).
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = load->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Register current_method = locations->InAt(0).AsRegister<Register>();
  // out = current_method->declaring_class_
  __ LoadFromOffset(
      kLoadWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
  // out = out->dex_cache_strings_
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  // out = out[string_index]
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3999
4000void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
4001  LocationSummary* locations =
4002      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4003  locations->SetOut(Location::RequiresRegister());
4004}
4005
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  // Load the pending exception from the thread (TR)...
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // ...and clear the thread-local slot. Clobbers IP.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
4013
4014void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
4015  LocationSummary* locations =
4016      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4017  InvokeRuntimeCallingConvention calling_convention;
4018  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4019}
4020
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  // Delegate entirely to the runtime's exception delivery entrypoint.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
}
4025
4026void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
4027  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
4028      ? LocationSummary::kNoCall
4029      : LocationSummary::kCallOnSlowPath;
4030  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4031  locations->SetInAt(0, Location::RequiresRegister());
4032  locations->SetInAt(1, Location::RequiresRegister());
4033  // The out register is used as a temporary, so it overlaps with the inputs.
4034  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
4035}
4036
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  // Materializes 1 in `out` if `obj` is an instance of `cls`, 0 otherwise.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  NearLabel done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ CompareAndBranchIfZero(obj, &zero);
  }
  // Compare the class of `obj` with `cls`. `out` doubles as the temporary
  // holding the object's class (it overlaps the inputs on purpose).
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }

  // `zero` is only branched to in the two cases below; bind it (and emit the
  // `out = 0` result) only when one of them applies.
  if (instruction->MustDoNullCheck() || instruction->IsClassFinal()) {
    __ Bind(&zero);
    __ LoadImmediate(out, 0);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
4080
4081void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
4082  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
4083      instruction, LocationSummary::kCallOnSlowPath);
4084  locations->SetInAt(0, Location::RequiresRegister());
4085  locations->SetInAt(1, Location::RequiresRegister());
4086  locations->AddTemp(Location::RequiresRegister());
4087}
4088
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  // Falls through when `obj` is null or its class equals `cls`; otherwise
  // enters the type-check slow path.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  NearLabel done;
  // avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ CompareAndBranchIfZero(obj, &done);
  }
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
  // `done` is only branched to by the null check above.
  if (instruction->MustDoNullCheck()) {
    __ Bind(&done);
  }
}
4114
4115void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
4116  LocationSummary* locations =
4117      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4118  InvokeRuntimeCallingConvention calling_convention;
4119  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4120}
4121
4122void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
4123  codegen_->InvokeRuntime(instruction->IsEnter()
4124        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4125      instruction,
4126      instruction->GetDexPc(),
4127      nullptr);
4128}
4129
4130void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
4131void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
4132void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
4133
4134void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
4135  LocationSummary* locations =
4136      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4137  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
4138         || instruction->GetResultType() == Primitive::kPrimLong);
4139  locations->SetInAt(0, Location::RequiresRegister());
4140  locations->SetInAt(1, Location::RequiresRegister());
4141  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4142}
4143
4144void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
4145  HandleBitwiseOperation(instruction);
4146}
4147
4148void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
4149  HandleBitwiseOperation(instruction);
4150}
4151
4152void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
4153  HandleBitwiseOperation(instruction);
4154}
4155
4156void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
4157  LocationSummary* locations = instruction->GetLocations();
4158
4159  if (instruction->GetResultType() == Primitive::kPrimInt) {
4160    Register first = locations->InAt(0).AsRegister<Register>();
4161    Register second = locations->InAt(1).AsRegister<Register>();
4162    Register out = locations->Out().AsRegister<Register>();
4163    if (instruction->IsAnd()) {
4164      __ and_(out, first, ShifterOperand(second));
4165    } else if (instruction->IsOr()) {
4166      __ orr(out, first, ShifterOperand(second));
4167    } else {
4168      DCHECK(instruction->IsXor());
4169      __ eor(out, first, ShifterOperand(second));
4170    }
4171  } else {
4172    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
4173    Location first = locations->InAt(0);
4174    Location second = locations->InAt(1);
4175    Location out = locations->Out();
4176    if (instruction->IsAnd()) {
4177      __ and_(out.AsRegisterPairLow<Register>(),
4178              first.AsRegisterPairLow<Register>(),
4179              ShifterOperand(second.AsRegisterPairLow<Register>()));
4180      __ and_(out.AsRegisterPairHigh<Register>(),
4181              first.AsRegisterPairHigh<Register>(),
4182              ShifterOperand(second.AsRegisterPairHigh<Register>()));
4183    } else if (instruction->IsOr()) {
4184      __ orr(out.AsRegisterPairLow<Register>(),
4185             first.AsRegisterPairLow<Register>(),
4186             ShifterOperand(second.AsRegisterPairLow<Register>()));
4187      __ orr(out.AsRegisterPairHigh<Register>(),
4188             first.AsRegisterPairHigh<Register>(),
4189             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4190    } else {
4191      DCHECK(instruction->IsXor());
4192      __ eor(out.AsRegisterPairLow<Register>(),
4193             first.AsRegisterPairLow<Register>(),
4194             ShifterOperand(second.AsRegisterPairLow<Register>()));
4195      __ eor(out.AsRegisterPairHigh<Register>(),
4196             first.AsRegisterPairHigh<Register>(),
4197             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4198    }
4199  }
4200}
4201
// Emits the call sequence for a static or direct invoke. `temp` holds the
// scratch register used to reach the callee. Clobbers LR.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  if (invoke->IsStringInit()) {
    Register reg = temp.AsRegister<Register>();
    // temp = thread->string_init_entrypoint
    __ LoadFromOffset(kLoadWord, reg, TR, invoke->GetStringInitOffset());
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, reg,
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else if (invoke->IsRecursive()) {
    // Direct recursive call: branch straight to this method's frame entry.
    __ bl(GetFrameEntryLabel());
  } else {
    // Resolve the callee through the current method's dex cache.
    Register current_method =
        invoke->GetLocations()->InAt(invoke->GetCurrentMethodInputIndex()).AsRegister<Register>();
    Register reg = temp.AsRegister<Register>();
    // reg = current_method->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, reg, current_method, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // reg = reg[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, reg, reg, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = reg[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, reg, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
        kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  }

  DCHECK(!IsLeafMethod());
}
4241
void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor indicates a pass-ordering bug, hence the fatal log.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
4247
void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor indicates a pass-ordering bug, hence the fatal log.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
4253
4254}  // namespace arm
4255}  // namespace art
4256