code_generator_arm.cc revision 66d126ea06ce3f507d86ca5f0d1f752170ac9be1
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Stack offset at which the current method is spilled (GenerateFrameEntry
// stores R0 at SP + 0).
static constexpr int kCurrentMethodStackOffset = 0;

// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Core callee-saves.  PC is listed to mimic Quick's frame layout; at entry
// LR is pushed in its place (see GenerateFrameEntry).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
// Callee-saved floating point registers.
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
55
56#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
57#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
58
// Slow path jumping into the runtime to throw a NullPointerException.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The entrypoint throws; note that no branch back to the exit label is
    // emitted, and no live registers are saved/restored.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  // The null check this slow path services (provides dex PC / environment).
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
74
// Slow path jumping into the runtime to throw an ArithmeticException for
// division by zero.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The entrypoint throws; no branch back to the exit label is emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  // The zero check this slow path services (provides dex PC / environment).
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
90
// Slow path calling the runtime to service a pending thread suspension
// request.  Unlike the throwing slow paths, execution resumes afterwards,
// so live registers must be preserved across the call.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  // `successor` is the block to branch to after the check, or null to
  // resume right after the suspend check instruction.
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call may clobber any register: spill the live ones first.
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      // Resume in the main code stream right after the suspend check.
      __ b(GetReturnLabel());
    } else {
      // Branch directly to the successor block.
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    // Only meaningful when there is no explicit successor.
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
125
// Slow path jumping into the runtime to throw an
// ArrayIndexOutOfBoundsException, passing the offending index and the
// array length as arguments.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // The entrypoint throws; no branch back to the exit label is emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  HBoundsCheck* const instruction_;
  // Location of the out-of-range index.
  const Location index_location_;
  // Location of the array length it was checked against.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
157
// Slow path resolving a class through the runtime (optionally running its
// static initializer) and moving the result into the output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    // Use the locations of `at_` (the instruction being serviced), not of
    // `cls_`, since they may differ for a clinit check.
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Arguments: type index in the first register, current method in the
    // second, per the runtime calling convention.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
209
// Slow path resolving a string through the runtime and moving the result
// into the instruction's output register.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output must not be a live (saved) register or the move from R0
    // below would be clobbered by the restore.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Arguments: string index in the first register, current method in the
    // second, per the runtime calling convention.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    // The resolved string comes back in R0.
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
238
// Slow path shared by instanceof and check-cast.  For instanceof it calls
// pInstanceofNonTrivial and moves the boolean result into the output; for
// check-cast it calls pCheckCast (which throws on failure).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instanceof, the output must not be a saved live register, or the
    // move from R0 below would be undone by the restore.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
      // The boolean result comes back in R0.
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
    }

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  // Location of the class the object is being checked against.
  const Location class_to_check_;
  // Location of the object's actual class.
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
289
// Slow path transferring execution to the runtime's deoptimization
// entrypoint (back to the interpreter).
class DeoptimizationSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DeoptimizationSlowPathARM(HInstruction* instruction)
    : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    // No register restore or exit branch is emitted: pDeoptimize does not
    // return to this compiled code.
    arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM);
};
309
310#undef __
311
312#undef __
313#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
314
315inline Condition ARMCondition(IfCondition cond) {
316  switch (cond) {
317    case kCondEQ: return EQ;
318    case kCondNE: return NE;
319    case kCondLT: return LT;
320    case kCondLE: return LE;
321    case kCondGT: return GT;
322    case kCondGE: return GE;
323    default:
324      LOG(FATAL) << "Unknown if condition";
325  }
326  return EQ;        // Unreachable.
327}
328
329inline Condition ARMOppositeCondition(IfCondition cond) {
330  switch (cond) {
331    case kCondEQ: return NE;
332    case kCondNE: return EQ;
333    case kCondLT: return GE;
334    case kCondLE: return GT;
335    case kCondGT: return LE;
336    case kCondGE: return LT;
337    default:
338      LOG(FATAL) << "Unknown if condition";
339  }
340  return EQ;        // Unreachable.
341}
342
// Prints the name of core register `reg` to `stream` (debugging output).
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
346
// Prints the name of S register `reg` to `stream` (debugging output).
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
350
// Spills core register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
355
// Reloads core register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
360
// Spills S register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
365
// Reloads S register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
370
// Constructs the ARM code generator.  The callee-save masks passed to the
// base class are derived from the kCoreCalleeSaves/kFpuCalleeSaves arrays.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
392
// Picks a free register (or register pair) for a value of type `type`,
// marking it and any overlapping pair entries as blocked.  Used by the
// baseline register allocator.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; block both halves so they cannot
      // be handed out individually.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      // Floats take a single S register.
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of consecutive S registers.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
443
// Marks the registers the allocator must never hand out.  In baseline mode
// all callee-saves are additionally blocked, except the one register kept
// for long operations (kCoreSavedRegisterForBaseline).
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // Keep this one available so baseline can do long operations.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate the blocked single registers to the pair table.
  UpdateBlockedPairRegisters();
}
473
474void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
475  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
476    ArmManagedRegister current =
477        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
478    if (blocked_core_registers_[current.AsRegisterPairLow()]
479        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
480      blocked_register_pairs_[i] = true;
481    }
482  }
483}
484
// Graph visitor that emits native code for each HInstruction through the
// code generator's assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
489
// Returns the zero-based index of the least significant set bit of `mask`.
static uint32_t LeastSignificantBit(uint32_t mask) {
  // ffs numbers bits starting at 1, so shift the result to a 0-based index.
  int first_set_bit = ffs(mask);
  return static_cast<uint32_t>(first_set_bit - 1);
}
494
// Computes the core and FPU spill masks from the allocated callee-save
// registers.  The FPU mask is made contiguous because vpush/vpop operate
// on a consecutive register range.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every bit between the lowest and highest set bits.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
515
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}
519
// Maps an S register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}
523
// Emits the method prologue: optional stack-overflow probe, callee-save
// pushes (with CFI records), frame allocation, and spilling of the current
// method at SP + 0 (kCurrentMethodStackOffset).
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small frames cannot run past the reserved stack
  // region, so their probe can be skipped.
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  // The probe below is a plain load that is expected to fault on overflow,
  // which requires the implicit stack overflow check machinery.
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Touch the far end of the reserved stack region; RecordPcInfo lets the
    // runtime map a fault here back to this method entry.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  uint32_t push_mask = (core_spill_mask_ & (~(1 << PC))) | 1 << LR;
  __ PushList(push_mask);
  __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(push_mask));
  __ cfi().RelOffsetForMany(DWARFReg(R0), 0, push_mask, kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    // ComputeSpillMask made the FPU mask contiguous, as vpush requires.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
    __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RelOffsetForMany(DWARFReg(S0), 0, fpu_spill_mask_, kArmWordSize);
  }
  // Allocate the rest of the frame and store the current method (R0 on
  // entry) at the bottom of the frame.
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, -adjust);
  __ cfi().AdjustCFAOffset(adjust);
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
557
// Emits the method epilogue, mirroring GenerateFrameEntry: frame
// deallocation, FPU restores, then a PopList whose mask includes PC,
// which performs the return.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    // Nothing was pushed on entry; just return.
    __ bx(LR);
    return;
  }
  __ cfi().RememberState();
  // Deallocate the part of the frame beyond the register spill area.
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, adjust);
  __ cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    // vpop restores the contiguous range built by ComputeSpillMask.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
    // NOTE(review): the entry path uses kArmWordSize for this adjustment;
    // kArmPointerSize is the same value (4) but consider unifying.
    __ cfi().AdjustCFAOffset(-kArmPointerSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RestoreMany(DWARFReg(SRegister(0)), fpu_spill_mask_);
  }
  // core_spill_mask_ contains PC (see kCoreCalleeSaves), so this pop
  // returns from the method.
  __ PopList(core_spill_mask_);
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
577
// Binds the label of `block` to the current code position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
581
// Returns the stack slot (single or double, depending on width) backing
// the local read by `load`.  Narrow integral types are widened to int
// before being stored in a local, so they are unexpected here.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      // 64-bit values occupy two consecutive slots.
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
      UNREACHABLE();
  }

  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
605
// Computes the location (register or stack slot) of the next method
// parameter of type `type` under the managed ARM calling convention.
// Note that `stack_index_` advances for every parameter, so a stack slot
// remains reserved even when the value is passed in a register.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit core values take the next free core register, else a slot.
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs take an aligned core register pair, or a double stack slot.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The convention must hand out consecutive registers here, so the
        // pair location below is well-formed.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        // No odd single-register hole left behind by a double: catch up
        // with the registers already claimed by doubles.
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of consecutive S registers.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
680
// Returns where a value of type `type` comes back from a call: R0 (or the
// R0/R1 pair) for core values, S0 (or the S0/S1 pair) for floating point,
// and no location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}
709
// Moves a 32-bit value between any combination of core register, S register
// and stack slot.  IP serves as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
743
// Moves a 64-bit value between register pairs, FPU register pairs and
// double stack slots.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The two halves may overlap (e.g. <R1,R2> -> <R2,R3>), so resolve
      // them as parallel moves.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Word-pair loads need the even/consecutive pair layout.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // The R1/R2 pair is not stored as a word pair here; store the two
        // halves with separate word stores instead.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move the two words as parallel moves.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
797
// Materializes the value defined by `instruction` into `location`, for use by
// `move_for`. The value may come from a constant, a dex local's stack slot, a
// temporary, or the output location of `instruction` itself.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Fast path: the value already lives in the requested location.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      // 32-bit constant (null is encoded as 0): load directly into a register,
      // or go through the IP scratch register when the target is a stack slot.
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      // 64-bit constant: move the low and high words separately, using IP as
      // scratch when the target is a double stack slot.
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Value of a dex local: copy from its stack slot, with the move width
    // selected by the value's primitive type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    // Temporaries live in dedicated stack slots managed by the code generator.
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: copy from the instruction's own output location. This is
    // only valid immediately after the instruction (or after its temporaries).
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
881
// Calls a quick runtime entrypoint: loads its address from the Thread
// register (TR) at `entry_point_offset`, branches-and-links through LR, and
// records PC info at `dex_pc` so stack walks can map the return address.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc, slow_path);
  // Only explicit-check pseudo instructions or instructions whose locations
  // allow a call may reach the runtime; anything else contradicts the method
  // having been classified as a leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
896
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  // An unconditional branch needs no register locations.
  got->SetLocations(nullptr);
}
900
// Emits an unconditional branch to the goto's successor. Loop back edges and
// the entry block fold in a suspend check when one is present.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge of a loop with a suspend check: the suspend check code itself
    // branches back to the loop header, so no explicit branch is needed.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method-entry suspend check: no alternate successor to branch to.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
922
void LocationsBuilderARM::VisitExit(HExit* exit) {
  // The exit block generates no code and needs no locations.
  exit->SetLocations(nullptr);
}
926
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  // Nothing to emit; control never falls through the exit block.
  UNUSED(exit);
}
930
// Emits the test-and-branch sequence for `instruction`'s condition input.
//   true_target:        label taken when the condition is true.
//   false_target:       label taken when false; nullptr means fall through.
//   always_true_target: label used when the condition is the constant 1;
//                       nullptr when the true successor is the fall-through.
void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instruction,
                                                        Label* true_target,
                                                        Label* false_target,
                                                        Label* always_true_target) {
  HInstruction* cond = instruction->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (always_true_target != nullptr) {
        __ b(always_true_target);
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(instruction->GetLocations()->InAt(0).IsRegister());
      __ cmp(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(true_target, NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Encode the constant as an immediate operand when possible,
        // otherwise materialize it in the IP scratch register.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(true_target, ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (false_target != nullptr) {
    __ b(false_target);
  }
}
982
983void LocationsBuilderARM::VisitIf(HIf* if_instr) {
984  LocationSummary* locations =
985      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
986  HInstruction* cond = if_instr->InputAt(0);
987  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
988    locations->SetInAt(0, Location::RequiresRegister());
989  }
990}
991
992void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
993  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
994  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
995  Label* always_true_target = true_target;
996  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
997                                if_instr->IfTrueSuccessor())) {
998    always_true_target = nullptr;
999  }
1000  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1001                                if_instr->IfFalseSuccessor())) {
1002    false_target = nullptr;
1003  }
1004  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1005}
1006
// Deoptimization branches to a slow path, so this is a call-on-slow-path
// summary. A register input is only required for a materialized condition.
void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  HInstruction* cond = deoptimize->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
1016
1017void InstructionCodeGeneratorARM::VisitDeoptimize(HDeoptimize* deoptimize) {
1018  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena())
1019      DeoptimizationSlowPathARM(deoptimize);
1020  codegen_->AddSlowPath(slow_path);
1021  Label* slow_path_entry = slow_path->GetEntryLabel();
1022  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1023}
1024
// Shared location setup for all HCondition subclasses: register left operand,
// register-or-constant right operand, and an output register only when the
// boolean result must be materialized.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
1034
// Materializes a comparison's boolean result: compares the operands, then a
// Thumb-2 IT (if-then-else) block moves 1 or 0 into the output register.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // Nothing to emit when all users consume the condition via its inputs.
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Use the constant as an immediate operand when encodable; otherwise
    // materialize it in the IP scratch register first.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT-else: out = 1 on the condition, out = 0 on its opposite.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1060
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1064
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1068
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1072
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1076
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1080
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1084
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1088
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1092
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1096
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1100
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Shares the generic HCondition location setup.
  VisitCondition(comp);
}
1104
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Shares the generic HCondition lowering.
  VisitCondition(comp);
}
1108
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  // Locals are stack slots managed by the code generator; no locations.
  local->SetLocations(nullptr);
}
1112
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // No code to emit; locals only exist in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1116
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  // The load is resolved at the use site (see CodeGeneratorARM::Move).
  load->SetLocations(nullptr);
}
1120
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1125
// Requires the stored value (input 1) to live directly in the local's stack
// slot; the slot width (single vs. double) is chosen by the value's type.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1149
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the input location pins the value to the stack slot.
  UNUSED(store);
}
1153
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  // Constants live in a ConstantLocation and are materialized at use sites.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1159
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1164
void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
  // Constants live in a ConstantLocation and are materialized at use sites.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1170
void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1175
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  // Constants live in a ConstantLocation and are materialized at use sites.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1181
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1186
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  // Constants live in a ConstantLocation and are materialized at use sites.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1192
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1197
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Constants live in a ConstantLocation and are materialized at use sites.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1203
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1208
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  // No value to return, so no locations.
  ret->SetLocations(nullptr);
}
1212
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
}
1217
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // The returned value must already sit in the calling convention's return
  // location for its type.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1223
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  // The value is already in the return location; just tear down the frame.
  codegen_->GenerateFrameExit();
}
1228
1229void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1230  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
1231                                         codegen_->GetInstructionSetFeatures());
1232  if (intrinsic.TryDispatch(invoke)) {
1233    return;
1234  }
1235
1236  HandleInvoke(invoke);
1237}
1238
// Loads the current method (spilled at SP + kCurrentMethodStackOffset on
// entry) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1243
1244static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1245  if (invoke->GetLocations()->Intrinsified()) {
1246    IntrinsicCodeGeneratorARM intrinsic(codegen);
1247    intrinsic.Dispatch(invoke);
1248    return true;
1249  }
1250  return false;
1251}
1252
1253void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1254  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
1255    return;
1256  }
1257
1258  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1259
1260  codegen_->GenerateStaticOrDirectCall(invoke, temp);
1261  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1262}
1263
1264void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1265  LocationSummary* locations =
1266      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1267  locations->AddTemp(Location::RegisterLocation(R0));
1268
1269  InvokeDexCallingConventionVisitor calling_convention_visitor;
1270  for (size_t i = 0; i < invoke->InputCount(); i++) {
1271    HInstruction* input = invoke->InputAt(i);
1272    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1273  }
1274
1275  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1276}
1277
1278void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1279  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
1280                                         codegen_->GetInstructionSetFeatures());
1281  if (intrinsic.TryDispatch(invoke)) {
1282    return;
1283  }
1284
1285  HandleInvoke(invoke);
1286}
1287
// Generates a virtual call: loads the receiver's class, indexes into the
// embedded vtable to fetch the target ArtMethod, and calls its quick code
// entry point through LR.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1318
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: R12 carries the interface method's dex index
  // (loaded in the codegen visitor below).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1324
// Generates an interface call: sets the hidden dex-method-index argument,
// loads the receiver's class, indexes into the embedded IMT to fetch the
// target ArtMethod, and calls its quick code entry point through LR.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1357
// Sets up locations for arithmetic negation. Long negation requires the
// output to overlap-check against the input pair (the generated sequence
// writes the output low word before reading the input high word).
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1383
// Emits arithmetic negation. Int uses a single RSB; long uses an
// RSBS/SBC/SUB sequence (RSC does not exist in Thumb-2); float/double use
// the VFP negate instructions.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1430
// Sets up locations for a primitive type conversion, keyed on (result type,
// input type). Most conversions are register-to-register; float-to-long and
// double-to-long are runtime calls and use the runtime calling convention
// for their input and output.
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1644
// Emits code for an HTypeConversion using the registers and temps reserved by
// LocationsBuilderARM::VisitTypeConversion.  Narrowing int conversions use
// bit-field extracts (sbfx/ubfx); long-to-fp conversions are synthesized from
// double-precision arithmetic; fp-to-long conversions call into the runtime
// (pF2l/pD2l quick entrypoints).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // Identity conversions should have been removed before code generation.
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Signed bit-field extract of the low 8 bits.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Signed bit-field extract of the low 16 bits.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept; the input may live in
          // a register pair, a double stack slot, or be a constant.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FP temp, then move the integer result to a core
          // register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // Done in the runtime; the locations builder pinned the operand and
          // result to the runtime calling convention.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Unsigned (zero-extending) extract of the low 16 bits.
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };  // NOTE(review): this ';' is a redundant empty statement after the switch.
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same double-based technique as long-to-float above, but the final
          // narrowing conversion is not needed.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };  // NOTE(review): redundant ';' as above.
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1918
1919void LocationsBuilderARM::VisitAdd(HAdd* add) {
1920  LocationSummary* locations =
1921      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1922  switch (add->GetResultType()) {
1923    case Primitive::kPrimInt: {
1924      locations->SetInAt(0, Location::RequiresRegister());
1925      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1926      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1927      break;
1928    }
1929
1930    case Primitive::kPrimLong: {
1931      locations->SetInAt(0, Location::RequiresRegister());
1932      locations->SetInAt(1, Location::RequiresRegister());
1933      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1934      break;
1935    }
1936
1937    case Primitive::kPrimFloat:
1938    case Primitive::kPrimDouble: {
1939      locations->SetInAt(0, Location::RequiresFpuRegister());
1940      locations->SetInAt(1, Location::RequiresFpuRegister());
1941      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1942      break;
1943    }
1944
1945    default:
1946      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1947  }
1948}
1949
1950void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1951  LocationSummary* locations = add->GetLocations();
1952  Location out = locations->Out();
1953  Location first = locations->InAt(0);
1954  Location second = locations->InAt(1);
1955  switch (add->GetResultType()) {
1956    case Primitive::kPrimInt:
1957      if (second.IsRegister()) {
1958        __ add(out.AsRegister<Register>(),
1959               first.AsRegister<Register>(),
1960               ShifterOperand(second.AsRegister<Register>()));
1961      } else {
1962        __ AddConstant(out.AsRegister<Register>(),
1963                       first.AsRegister<Register>(),
1964                       second.GetConstant()->AsIntConstant()->GetValue());
1965      }
1966      break;
1967
1968    case Primitive::kPrimLong: {
1969      DCHECK(second.IsRegisterPair());
1970      __ adds(out.AsRegisterPairLow<Register>(),
1971              first.AsRegisterPairLow<Register>(),
1972              ShifterOperand(second.AsRegisterPairLow<Register>()));
1973      __ adc(out.AsRegisterPairHigh<Register>(),
1974             first.AsRegisterPairHigh<Register>(),
1975             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1976      break;
1977    }
1978
1979    case Primitive::kPrimFloat:
1980      __ vadds(out.AsFpuRegister<SRegister>(),
1981               first.AsFpuRegister<SRegister>(),
1982               second.AsFpuRegister<SRegister>());
1983      break;
1984
1985    case Primitive::kPrimDouble:
1986      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1987               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1988               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1989      break;
1990
1991    default:
1992      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1993  }
1994}
1995
1996void LocationsBuilderARM::VisitSub(HSub* sub) {
1997  LocationSummary* locations =
1998      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1999  switch (sub->GetResultType()) {
2000    case Primitive::kPrimInt: {
2001      locations->SetInAt(0, Location::RequiresRegister());
2002      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
2003      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2004      break;
2005    }
2006
2007    case Primitive::kPrimLong: {
2008      locations->SetInAt(0, Location::RequiresRegister());
2009      locations->SetInAt(1, Location::RequiresRegister());
2010      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2011      break;
2012    }
2013    case Primitive::kPrimFloat:
2014    case Primitive::kPrimDouble: {
2015      locations->SetInAt(0, Location::RequiresFpuRegister());
2016      locations->SetInAt(1, Location::RequiresFpuRegister());
2017      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2018      break;
2019    }
2020    default:
2021      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2022  }
2023}
2024
2025void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
2026  LocationSummary* locations = sub->GetLocations();
2027  Location out = locations->Out();
2028  Location first = locations->InAt(0);
2029  Location second = locations->InAt(1);
2030  switch (sub->GetResultType()) {
2031    case Primitive::kPrimInt: {
2032      if (second.IsRegister()) {
2033        __ sub(out.AsRegister<Register>(),
2034               first.AsRegister<Register>(),
2035               ShifterOperand(second.AsRegister<Register>()));
2036      } else {
2037        __ AddConstant(out.AsRegister<Register>(),
2038                       first.AsRegister<Register>(),
2039                       -second.GetConstant()->AsIntConstant()->GetValue());
2040      }
2041      break;
2042    }
2043
2044    case Primitive::kPrimLong: {
2045      DCHECK(second.IsRegisterPair());
2046      __ subs(out.AsRegisterPairLow<Register>(),
2047              first.AsRegisterPairLow<Register>(),
2048              ShifterOperand(second.AsRegisterPairLow<Register>()));
2049      __ sbc(out.AsRegisterPairHigh<Register>(),
2050             first.AsRegisterPairHigh<Register>(),
2051             ShifterOperand(second.AsRegisterPairHigh<Register>()));
2052      break;
2053    }
2054
2055    case Primitive::kPrimFloat: {
2056      __ vsubs(out.AsFpuRegister<SRegister>(),
2057               first.AsFpuRegister<SRegister>(),
2058               second.AsFpuRegister<SRegister>());
2059      break;
2060    }
2061
2062    case Primitive::kPrimDouble: {
2063      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2064               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2065               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2066      break;
2067    }
2068
2069
2070    default:
2071      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2072  }
2073}
2074
2075void LocationsBuilderARM::VisitMul(HMul* mul) {
2076  LocationSummary* locations =
2077      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2078  switch (mul->GetResultType()) {
2079    case Primitive::kPrimInt:
2080    case Primitive::kPrimLong:  {
2081      locations->SetInAt(0, Location::RequiresRegister());
2082      locations->SetInAt(1, Location::RequiresRegister());
2083      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2084      break;
2085    }
2086
2087    case Primitive::kPrimFloat:
2088    case Primitive::kPrimDouble: {
2089      locations->SetInAt(0, Location::RequiresFpuRegister());
2090      locations->SetInAt(1, Location::RequiresFpuRegister());
2091      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2092      break;
2093    }
2094
2095    default:
2096      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2097  }
2098}
2099
// Emits multiplication code.  The long case builds a 64x64->64 multiply out
// of three 32-bit multiplies (mul/mla/umull) using IP as scratch; its
// correctness depends on the exact instruction order and on the register
// constraints asserted by the DCHECK_NEs below.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      // (The mla below writes out_hi before umull reads in1_lo/in2_lo.)
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2161
2162void LocationsBuilderARM::VisitDiv(HDiv* div) {
2163  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2164  if (div->GetResultType() == Primitive::kPrimLong) {
2165    // pLdiv runtime call.
2166    call_kind = LocationSummary::kCall;
2167  } else if (div->GetResultType() == Primitive::kPrimInt &&
2168             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2169    // pIdivmod runtime call.
2170    call_kind = LocationSummary::kCall;
2171  }
2172
2173  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2174
2175  switch (div->GetResultType()) {
2176    case Primitive::kPrimInt: {
2177      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2178        locations->SetInAt(0, Location::RequiresRegister());
2179        locations->SetInAt(1, Location::RequiresRegister());
2180        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2181      } else {
2182        InvokeRuntimeCallingConvention calling_convention;
2183        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2184        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2185        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2186        //       we only need the former.
2187        locations->SetOut(Location::RegisterLocation(R0));
2188      }
2189      break;
2190    }
2191    case Primitive::kPrimLong: {
2192      InvokeRuntimeCallingConvention calling_convention;
2193      locations->SetInAt(0, Location::RegisterPairLocation(
2194          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2195      locations->SetInAt(1, Location::RegisterPairLocation(
2196          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2197      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2198      break;
2199    }
2200    case Primitive::kPrimFloat:
2201    case Primitive::kPrimDouble: {
2202      locations->SetInAt(0, Location::RequiresFpuRegister());
2203      locations->SetInAt(1, Location::RequiresFpuRegister());
2204      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2205      break;
2206    }
2207
2208    default:
2209      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2210  }
2211}
2212
2213void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2214  LocationSummary* locations = div->GetLocations();
2215  Location out = locations->Out();
2216  Location first = locations->InAt(0);
2217  Location second = locations->InAt(1);
2218
2219  switch (div->GetResultType()) {
2220    case Primitive::kPrimInt: {
2221      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2222        __ sdiv(out.AsRegister<Register>(),
2223                first.AsRegister<Register>(),
2224                second.AsRegister<Register>());
2225      } else {
2226        InvokeRuntimeCallingConvention calling_convention;
2227        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
2228        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
2229        DCHECK_EQ(R0, out.AsRegister<Register>());
2230
2231        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), div, div->GetDexPc(), nullptr);
2232      }
2233      break;
2234    }
2235
2236    case Primitive::kPrimLong: {
2237      InvokeRuntimeCallingConvention calling_convention;
2238      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2239      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2240      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2241      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2242      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2243      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2244
2245      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc(), nullptr);
2246      break;
2247    }
2248
2249    case Primitive::kPrimFloat: {
2250      __ vdivs(out.AsFpuRegister<SRegister>(),
2251               first.AsFpuRegister<SRegister>(),
2252               second.AsFpuRegister<SRegister>());
2253      break;
2254    }
2255
2256    case Primitive::kPrimDouble: {
2257      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2258               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2259               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2260      break;
2261    }
2262
2263    default:
2264      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2265  }
2266}
2267
2268void LocationsBuilderARM::VisitRem(HRem* rem) {
2269  Primitive::Type type = rem->GetResultType();
2270
2271  // Most remainders are implemented in the runtime.
2272  LocationSummary::CallKind call_kind = LocationSummary::kCall;
2273  if (rem->GetResultType() == Primitive::kPrimInt &&
2274      codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2275    // Have hardware divide instruction for int, do it with three instructions.
2276    call_kind = LocationSummary::kNoCall;
2277  }
2278
2279  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2280
2281  switch (type) {
2282    case Primitive::kPrimInt: {
2283      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2284        locations->SetInAt(0, Location::RequiresRegister());
2285        locations->SetInAt(1, Location::RequiresRegister());
2286        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2287        locations->AddTemp(Location::RequiresRegister());
2288      } else {
2289        InvokeRuntimeCallingConvention calling_convention;
2290        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2291        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2292        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2293        //       we only need the latter.
2294        locations->SetOut(Location::RegisterLocation(R1));
2295      }
2296      break;
2297    }
2298    case Primitive::kPrimLong: {
2299      InvokeRuntimeCallingConvention calling_convention;
2300      locations->SetInAt(0, Location::RegisterPairLocation(
2301          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2302      locations->SetInAt(1, Location::RegisterPairLocation(
2303          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2304      // The runtime helper puts the output in R2,R3.
2305      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2306      break;
2307    }
2308    case Primitive::kPrimFloat: {
2309      InvokeRuntimeCallingConvention calling_convention;
2310      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2311      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2312      locations->SetOut(Location::FpuRegisterLocation(S0));
2313      break;
2314    }
2315
2316    case Primitive::kPrimDouble: {
2317      InvokeRuntimeCallingConvention calling_convention;
2318      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2319          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2320      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2321          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2322      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2323      break;
2324    }
2325
2326    default:
2327      LOG(FATAL) << "Unexpected rem type " << type;
2328  }
2329}
2330
// Generates code for an integer or floating-point remainder.  Int remainders
// use sdiv/mul/sub when the CPU has a hardware divider; all other cases call
// a quick runtime entrypoint.
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        Register reg1 = first.AsRegister<Register>();
        Register reg2 = second.AsRegister<Register>();
        Register temp = locations->GetTemp(0).AsRegister<Register>();

        // temp = reg1 / reg2  (integer division)
        // temp = temp * reg2
        // dest = reg1 - temp
        __ sdiv(temp, reg1, reg2);
        __ mul(temp, temp, reg2);
        __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      } else {
        // No hardware divide: call pIdivmod.  The locations builder placed
        // the operands in the first two argument registers, and the DCHECK
        // below pins the remainder output to R1.
        InvokeRuntimeCallingConvention calling_convention;
        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
        DCHECK_EQ(R1, out.AsRegister<Register>());

        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), rem, rem->GetDexPc(), nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit remainder always goes through the runtime.
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimFloat: {
      // Single-precision remainder via the runtime's fmodf helper.
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimDouble: {
      // Double-precision remainder via the runtime's fmod helper.
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
2381
2382void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2383  LocationSummary* locations =
2384      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2385  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2386  if (instruction->HasUses()) {
2387    locations->SetOut(Location::SameAsFirstInput());
2388  }
2389}
2390
2391void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2392  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2393  codegen_->AddSlowPath(slow_path);
2394
2395  LocationSummary* locations = instruction->GetLocations();
2396  Location value = locations->InAt(0);
2397
2398  switch (instruction->GetType()) {
2399    case Primitive::kPrimInt: {
2400      if (value.IsRegister()) {
2401        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2402        __ b(slow_path->GetEntryLabel(), EQ);
2403      } else {
2404        DCHECK(value.IsConstant()) << value;
2405        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2406          __ b(slow_path->GetEntryLabel());
2407        }
2408      }
2409      break;
2410    }
2411    case Primitive::kPrimLong: {
2412      if (value.IsRegisterPair()) {
2413        __ orrs(IP,
2414                value.AsRegisterPairLow<Register>(),
2415                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2416        __ b(slow_path->GetEntryLabel(), EQ);
2417      } else {
2418        DCHECK(value.IsConstant()) << value;
2419        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2420          __ b(slow_path->GetEntryLabel());
2421        }
2422      }
2423      break;
2424    default:
2425      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2426    }
2427  }
2428}
2429
2430void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2431  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2432
2433  LocationSummary* locations =
2434      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2435
2436  switch (op->GetResultType()) {
2437    case Primitive::kPrimInt: {
2438      locations->SetInAt(0, Location::RequiresRegister());
2439      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2440      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2441      break;
2442    }
2443    case Primitive::kPrimLong: {
2444      locations->SetInAt(0, Location::RequiresRegister());
2445      locations->SetInAt(1, Location::RequiresRegister());
2446      locations->AddTemp(Location::RequiresRegister());
2447      locations->SetOut(Location::RequiresRegister());
2448      break;
2449    }
2450    default:
2451      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2452  }
2453}
2454
// Emits code for Shl/Shr/UShr on ints and longs.  The shift amount is masked
// (to 0-31 for ints, 0-63 for longs) to get Java semantics.  Long values are
// shifted as a register pair, using a temp to carry the bits that cross the
// 32-bit boundary and IT-predicated instructions to handle shifts >= 32.
// NOTE(review): the `and_` masking below clobbers the shift-amount register
// in place, and the long cases use the literal 63 where the int case uses
// kMaxIntShiftValue -- consider a named constant; verify nothing reads the
// shift amount after this point.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift amount: mask it at compile time.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register o_h = out.AsRegisterPairHigh<Register>();
      Register o_l = out.AsRegisterPairLow<Register>();

      Register temp = locations->GetTemp(0).AsRegister<Register>();

      Register high = first.AsRegisterPairHigh<Register>();
      Register low = first.AsRegisterPairLow<Register>();

      Register second_reg = second.AsRegister<Register>();

      if (op->IsShl()) {
        // Shift the high part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsl(o_h, high, second_reg);
        // Shift the low part and `or` what overflew on the high part
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsr(temp, low, temp);
        __ orr(o_h, o_h, ShifterOperand(temp));
        // If the shift is > 32 bits, override the high part
        // (subs sets flags: PL means shift - 32 >= 0).
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsl(o_h, low, temp, false, PL);
        // Shift the low part
        __ Lsl(o_l, low, second_reg);
      } else if (op->IsShr()) {
        // Shift the low part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(o_l, low, second_reg);
        // Shift the high part and `or` what underflew on the low part
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsl(temp, high, temp);
        __ orr(o_l, o_l, ShifterOperand(temp));
        // If the shift is > 32 bits, override the low part
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Asr(o_l, high, temp, false, PL);
        // Shift the high part
        __ Asr(o_h, high, second_reg);
      } else {
        // same as Shr except we use `Lsr`s and not `Asr`s
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(o_l, low, second_reg);
        __ rsb(temp, second_reg, ShifterOperand(32));
        __ Lsl(temp, high, temp);
        __ orr(o_l, o_l, ShifterOperand(temp));
        __ subs(temp, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsr(o_l, high, temp, false, PL);
        __ Lsr(o_h, high, second_reg);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2551
// Shl shares its location constraints with the other shifts; see HandleShift.
void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2555
// Shl code generation is shared with the other shifts; see HandleShift.
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2559
// Shr shares its location constraints with the other shifts; see HandleShift.
void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2563
// Shr code generation is shared with the other shifts; see HandleShift.
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2567
// UShr shares its location constraints with the other shifts; see HandleShift.
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2571
// UShr code generation is shared with the other shifts; see HandleShift.
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2575
2576void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2577  LocationSummary* locations =
2578      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2579  InvokeRuntimeCallingConvention calling_convention;
2580  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2581  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2582  locations->SetOut(Location::RegisterLocation(R0));
2583}
2584
// Calls the allocation entrypoint: the type index goes in argument register
// 0 and the current method in argument register 1.
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2594
2595void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2596  LocationSummary* locations =
2597      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2598  InvokeRuntimeCallingConvention calling_convention;
2599  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2600  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2601  locations->SetOut(Location::RegisterLocation(R0));
2602  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2603}
2604
// Calls the array-allocation entrypoint: the type index goes in argument
// register 0 and the current method in argument register 2 (the length was
// placed in argument register 1 by the locations builder).
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2614
2615void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2616  LocationSummary* locations =
2617      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2618  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2619  if (location.IsStackSlot()) {
2620    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2621  } else if (location.IsDoubleStackSlot()) {
2622    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2623  }
2624  locations->SetOut(location);
2625}
2626
// No code is emitted: the locations builder already assigned the parameter
// its calling-convention register or stack slot.
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2631
2632void LocationsBuilderARM::VisitNot(HNot* not_) {
2633  LocationSummary* locations =
2634      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2635  locations->SetInAt(0, Location::RequiresRegister());
2636  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2637}
2638
// Emits bitwise not: a single mvn for ints, two mvns for long pairs.
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      // Writing the low half first is safe even if the pairs overlap:
      // pairs are even/odd aligned (see ExpectedPairLayout), so out-low
      // (even) can never alias in-high (odd).
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2659
2660void LocationsBuilderARM::VisitBooleanNot(HBooleanNot* bool_not) {
2661  LocationSummary* locations =
2662      new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
2663  locations->SetInAt(0, Location::RequiresRegister());
2664  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2665}
2666
2667void InstructionCodeGeneratorARM::VisitBooleanNot(HBooleanNot* bool_not) {
2668  DCHECK_EQ(bool_not->InputAt(0)->GetType(), Primitive::kPrimBoolean);
2669  LocationSummary* locations = bool_not->GetLocations();
2670  Location out = locations->Out();
2671  Location in = locations->InAt(0);
2672  __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
2673}
2674
2675void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2676  LocationSummary* locations =
2677      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2678  switch (compare->InputAt(0)->GetType()) {
2679    case Primitive::kPrimLong: {
2680      locations->SetInAt(0, Location::RequiresRegister());
2681      locations->SetInAt(1, Location::RequiresRegister());
2682      // Output overlaps because it is written before doing the low comparison.
2683      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2684      break;
2685    }
2686    case Primitive::kPrimFloat:
2687    case Primitive::kPrimDouble: {
2688      locations->SetInAt(0, Location::RequiresFpuRegister());
2689      locations->SetInAt(1, Location::RequiresFpuRegister());
2690      locations->SetOut(Location::RequiresRegister());
2691      break;
2692    }
2693    default:
2694      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2695  }
2696}
2697
// Emits a three-way comparison producing -1, 0 or 1 in the output register.
// Longs compare the high words signed, then the low words unsigned; floats
// and doubles use VCMP, with the unordered (NaN) case routed to greater or
// less depending on the instruction's bias.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Preload 0 while the flags are still dead (see comment above).
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // At this point the flags reflect the low-word cmp (longs) or vmstat (FP).
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2746
2747void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2748  LocationSummary* locations =
2749      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2750  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2751    locations->SetInAt(i, Location::Any());
2752  }
2753  locations->SetOut(Location::Any());
2754}
2755
// No code is ever generated for a phi; reaching this visitor is a compiler
// bug.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2760
2761void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2762  // TODO (ported from quick): revisit Arm barrier kinds
2763  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2764  switch (kind) {
2765    case MemBarrierKind::kAnyStore:
2766    case MemBarrierKind::kLoadAny:
2767    case MemBarrierKind::kAnyAny: {
2768      flavour = DmbOptions::ISH;
2769      break;
2770    }
2771    case MemBarrierKind::kStoreStore: {
2772      flavour = DmbOptions::ISHST;
2773      break;
2774    }
2775    default:
2776      LOG(FATAL) << "Unexpected memory barrier " << kind;
2777  }
2778  __ dmb(flavour);
2779}
2780
// Emits an atomic 64-bit load of [addr + offset] into out_lo/out_hi using
// ldrexd.  out_lo doubles as a scratch register when materializing the
// offset; that is safe because ldrexd overwrites it afterwards.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    // Build the effective address in IP, using out_lo as scratch.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2792
// Emits an atomic 64-bit store of value_lo/value_hi to [addr + offset] via an
// ldrexd/strexd retry loop: the exclusive store fails (writes non-zero to
// temp1) if the location was touched between the paired load and store, in
// which case we loop back and retry.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    // temp1 is not live until the ldrexd below, so use it to build the address.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd wrote 0 to temp1 on success, non-zero on failure.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2815
2816void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2817  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2818
2819  LocationSummary* locations =
2820      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2821  locations->SetInAt(0, Location::RequiresRegister());
2822  locations->SetInAt(1, Location::RequiresRegister());
2823
2824
2825  Primitive::Type field_type = field_info.GetFieldType();
2826  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2827  bool generate_volatile = field_info.IsVolatile()
2828      && is_wide
2829      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2830  // Temporary registers for the write barrier.
2831  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2832  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2833    locations->AddTemp(Location::RequiresRegister());
2834    locations->AddTemp(Location::RequiresRegister());
2835  } else if (generate_volatile) {
2836    // Arm encoding have some additional constraints for ldrexd/strexd:
2837    // - registers need to be consecutive
2838    // - the first register should be even but not R14.
2839    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2840    // enable Arm encoding.
2841    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2842
2843    locations->AddTemp(Location::RequiresRegister());
2844    locations->AddTemp(Location::RequiresRegister());
2845    if (field_type == Primitive::kPrimDouble) {
2846      // For doubles we need two more registers to copy the value.
2847      locations->AddTemp(Location::RegisterLocation(R2));
2848      locations->AddTemp(Location::RegisterLocation(R3));
2849    }
2850  }
2851}
2852
// Emits the store for an instance or static field set.
//
// Volatile stores are bracketed by memory barriers: an any-store barrier
// before the store and an any-any barrier after it.  Wide (64-bit) volatile
// stores on cores without atomic ldrd/strd go through the ldrexd/strexd loop
// in GenerateWideAtomicStore.  Reference stores that need it are followed by
// a GC card mark.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        // Move the double into two core registers for the exclusive store.
        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2950
2951void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2952  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2953  LocationSummary* locations =
2954      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2955  locations->SetInAt(0, Location::RequiresRegister());
2956
2957  bool volatile_for_double = field_info.IsVolatile()
2958      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2959      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2960  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
2961  locations->SetOut(Location::RequiresRegister(),
2962                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
2963  if (volatile_for_double) {
2964    // Arm encoding have some additional constraints for ldrexd/strexd:
2965    // - registers need to be consecutive
2966    // - the first register should be even but not R14.
2967    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2968    // enable Arm encoding.
2969    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2970    locations->AddTemp(Location::RequiresRegister());
2971    locations->AddTemp(Location::RequiresRegister());
2972  }
2973}
2974
// Emits the load for an instance or static field get.
//
// Volatile loads are followed by a load-any memory barrier.  Wide (64-bit)
// volatile loads on cores without atomic ldrd/strd use the ldrexd in
// GenerateWideAtomicLoad; for doubles the pair is then moved into the FP
// register with vmovdrr.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load into two core temps, then move the pair into the FP register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
3059
// Instance and static field stores share their locations logic; see HandleFieldSet.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3063
// Instance and static field stores share their code generation; see HandleFieldSet.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3067
// Instance and static field loads share their locations logic; see HandleFieldGet.
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3071
// Instance and static field loads share their code generation; see HandleFieldGet.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3075
// Static field loads share their locations logic with instance loads; see HandleFieldGet.
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3079
// Static field loads share their code generation with instance loads; see HandleFieldGet.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3083
// Static field stores share their locations logic with instance stores; see HandleFieldSet.
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3087
// Static field stores share their code generation with instance stores; see HandleFieldSet.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3091
3092void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
3093  LocationSummary* locations =
3094      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3095  locations->SetInAt(0, Location::RequiresRegister());
3096  if (instruction->HasUses()) {
3097    locations->SetOut(Location::SameAsFirstInput());
3098  }
3099}
3100
// Implicit null check: perform a load through the object so that a null
// reference faults, and record the PC so the runtime can map the fault back
// to this instruction.
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    // A later access to the same object will serve as the check.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load into IP (scratch): only the potential fault matters, not the value.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3110
// Emits an explicit null check: compare the object against null and branch
// to a throwing slow path on equality.
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
3121
3122void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
3123  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
3124    GenerateImplicitNullCheck(instruction);
3125  } else {
3126    GenerateExplicitNullCheck(instruction);
3127  }
3128}
3129
3130void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
3131  LocationSummary* locations =
3132      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3133  locations->SetInAt(0, Location::RequiresRegister());
3134  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3135  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3136}
3137
// Emits the load for an array element access. For a constant index the
// address is base + (index << size_shift) + data_offset folded into the
// load; otherwise IP is first set to base + scaled index and the load uses
// data_offset. The load width/signedness matches the component type.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extending byte load.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending byte load.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending halfword load.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending halfword load.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are 32-bit heap references here, so int and reference
      // loads share the same word load.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Word-pair load into the low register of the output pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The first load above may double as the implicit null check of `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3264
3265void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3266  Primitive::Type value_type = instruction->GetComponentType();
3267
3268  bool needs_write_barrier =
3269      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3270  bool needs_runtime_call = instruction->NeedsTypeCheck();
3271
3272  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3273      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3274  if (needs_runtime_call) {
3275    InvokeRuntimeCallingConvention calling_convention;
3276    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3277    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3278    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3279  } else {
3280    locations->SetInAt(0, Location::RequiresRegister());
3281    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3282    locations->SetInAt(2, Location::RequiresRegister());
3283
3284    if (needs_write_barrier) {
3285      // Temporary registers for the write barrier.
3286      locations->AddTemp(Location::RequiresRegister());
3287      locations->AddTemp(Location::RequiresRegister());
3288    }
3289  }
3290}
3291
// Emits the store for an array element write. Mirrors VisitArrayGet's
// addressing: constant indexes fold into the store offset, register indexes
// compute base + scaled index into IP first. Reference stores either mark
// the GC card afterwards (no type check needed) or call pAputObject.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record here (not at the end) so the pc maps to the store itself.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // The store needs a type check: defer to the runtime, which also
        // performs the write barrier.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc(),
                                nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3416
3417void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3418  LocationSummary* locations =
3419      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3420  locations->SetInAt(0, Location::RequiresRegister());
3421  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3422}
3423
3424void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3425  LocationSummary* locations = instruction->GetLocations();
3426  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3427  Register obj = locations->InAt(0).AsRegister<Register>();
3428  Register out = locations->Out().AsRegister<Register>();
3429  __ LoadFromOffset(kLoadWord, out, obj, offset);
3430  codegen_->MaybeRecordImplicitNullCheck(instruction);
3431}
3432
3433void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3434  LocationSummary* locations =
3435      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3436  locations->SetInAt(0, Location::RequiresRegister());
3437  locations->SetInAt(1, Location::RequiresRegister());
3438  if (instruction->HasUses()) {
3439    locations->SetOut(Location::SameAsFirstInput());
3440  }
3441}
3442
// Emits an array bounds check: branch to the throwing slow path unless
// 0 <= index < length.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  // CS is the unsigned >= condition, so a single compare rejects both
  // index >= length and negative indexes (huge as unsigned values).
  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}
3455
// Card-marking write barrier: after `value` has been stored into `object`,
// dirty the card covering `object` so the GC rescans it. Storing null needs
// no barrier, hence the early branch. `temp` and `card` are scratch.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // The card-table base address doubles as the dirty-card value, so storing
  // the low byte of `card` at card_table_base + (object >> shift) marks the
  // card dirty.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3464
// Temporaries carry no location summary; the code generator drives them.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3468
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3473
// Parallel moves are inserted by the register allocator, after the
// locations builder has run; reaching this visitor is a compiler bug.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3478
// Delegate all parallel-move emission to the dedicated move resolver.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3482
// Suspend checks have no inputs or outputs; they may only call out through
// a slow path.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3486
// Decides whether this suspend check needs its own code. Loop-header checks
// are deferred to the back edge, and the entry block's check is deferred to
// its trailing goto, so the poll is emitted only once per loop iteration or
// method entry.
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
3500
// Polls the current thread's flags halfword; any non-zero flags (e.g. a
// pending suspend request) divert to the slow path. With a non-null
// `successor` (a back edge) the fast path branches straight to it and the
// slow path returns there; otherwise control falls through via the slow
// path's return label.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3519
// The move resolver emits through the owning code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3523
// Materializes one pending move of the parallel-move graph. Sources can be
// core/FP registers, register pairs, (double) stack slots or constants;
// destinations core/FP registers, pairs or stack slots. IP serves as the
// core scratch and DTMP as the FP scratch for memory-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      // Stack-to-stack word move through the IP scratch register.
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      // 64-bit stack-to-stack move through the DTMP scratch register.
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constants: load the immediate directly into the destination register,
    // or stage it in IP for stack destinations.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spell the double out as two word stores of its raw bits.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3638
// Swaps a core register with a stack slot, using IP as the scratch: IP
// keeps the old register value while the slot is loaded, then IP is stored
// back into the slot.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3644
// Swaps two word-sized stack slots using IP plus one allocatable scratch
// register. If that scratch register had to be spilled (pushed), SP moved
// down one word, so both slot offsets must be rebased by kArmWordSize.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3655
// Swaps the contents of a move's source and destination (used to break
// cycles in the parallel-move graph). IP is the core scratch and DTMP the
// FP scratch; register<->memory swaps round-trip through them.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // S-register swap staged through the core scratch IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core-pair swap staged through the 64-bit FP scratch DTMP.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> double stack slot; relies on the pair being a
    // consecutive even/odd register pair (see ExpectedPairLayout).
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // D register <-> double stack slot, staged through DTMP.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // S register <-> stack slot, staged through IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // Swap the two halves independently.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3728
// Frees up a scratch register by pushing its current value on the stack.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3732
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3736
3737void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3738  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3739      ? LocationSummary::kCallOnSlowPath
3740      : LocationSummary::kNoCall;
3741  LocationSummary* locations =
3742      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3743  locations->SetOut(Location::RequiresRegister());
3744}
3745
// Loads a class reference into the output register. The referrer's own
// class comes straight from the current method; any other class is read
// from the dex cache, with a slow path covering resolution and (when
// required) class initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // out <- current_method->declaring_class_
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out <- current_method->dex_cache_resolved_types_[type_index]
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is not yet resolved: slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3772
3773void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3774  LocationSummary* locations =
3775      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3776  locations->SetInAt(0, Location::RequiresRegister());
3777  if (check->HasUses()) {
3778    locations->SetOut(Location::SameAsFirstInput());
3779  }
3780}
3781
// Emits a class-initialization check with a slow path that runs the class
// initializer if needed.
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
3790
// Emits a fast-path check that `class_reg` holds an initialized class,
// branching to `slow_path` otherwise. Clobbers IP. Binds the slow path's
// exit label at the end of the sequence.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  // Signed LT: any status below kStatusInitialized goes to the slow path.
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3801
3802void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3803  LocationSummary* locations =
3804      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3805  locations->SetOut(Location::RequiresRegister());
3806}
3807
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current_method->declaring_class_->dex_cache_strings_[string_index]
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // A null cache entry means the string is not resolved yet: defer to the
  // slow path, which resumes at the exit label with `out` filled in.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3821
3822void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3823  LocationSummary* locations =
3824      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3825  locations->SetOut(Location::RequiresRegister());
3826}
3827
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  // Load the pending exception from the thread-local slot (TR is the thread
  // register), then clear the slot via IP so it is not delivered again.
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3835
3836void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3837  LocationSummary* locations =
3838      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3839  InvokeRuntimeCallingConvention calling_convention;
3840  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3841}
3842
3843void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3844  codegen_->InvokeRuntime(
3845      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
3846}
3847
3848void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3849  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3850      ? LocationSummary::kNoCall
3851      : LocationSummary::kCallOnSlowPath;
3852  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3853  locations->SetInAt(0, Location::RequiresRegister());
3854  locations->SetInAt(1, Location::RequiresRegister());
3855  // The out register is used as a temporary, so it overlaps with the inputs.
3856  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3857}
3858
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`. `out` doubles as a temporary here,
  // which is why the locations builder marked the output as overlapping.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // "False" result for the null-object and final-class-mismatch cases.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  // NOTE: the slow path's exit label is bound after the zero store, so the
  // slow path is expected to have written `out` itself before returning here.
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3897
3898void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3899  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3900      instruction, LocationSummary::kCallOnSlowPath);
3901  locations->SetInAt(0, Location::RequiresRegister());
3902  locations->SetInAt(1, Location::RequiresRegister());
3903  locations->AddTemp(Location::RequiresRegister());
3904}
3905
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null reference always passes checkcast, so branch straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`. On mismatch the slow path performs
  // the thorough type check (and may throw).
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3926
3927void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3928  LocationSummary* locations =
3929      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3930  InvokeRuntimeCallingConvention calling_convention;
3931  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3932}
3933
3934void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3935  codegen_->InvokeRuntime(instruction->IsEnter()
3936        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3937      instruction,
3938      instruction->GetDexPc(),
3939      nullptr);
3940}
3941
3942void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3943void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3944void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3945
3946void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3947  LocationSummary* locations =
3948      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3949  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3950         || instruction->GetResultType() == Primitive::kPrimLong);
3951  locations->SetInAt(0, Location::RequiresRegister());
3952  locations->SetInAt(1, Location::RequiresRegister());
3953  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3954}
3955
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  // Code generation is shared across the bitwise operations.
  HandleBitwiseOperation(instruction);
}
3959
void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  // Code generation is shared across the bitwise operations.
  HandleBitwiseOperation(instruction);
}
3963
void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  // Code generation is shared across the bitwise operations.
  HandleBitwiseOperation(instruction);
}
3967
3968void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3969  LocationSummary* locations = instruction->GetLocations();
3970
3971  if (instruction->GetResultType() == Primitive::kPrimInt) {
3972    Register first = locations->InAt(0).AsRegister<Register>();
3973    Register second = locations->InAt(1).AsRegister<Register>();
3974    Register out = locations->Out().AsRegister<Register>();
3975    if (instruction->IsAnd()) {
3976      __ and_(out, first, ShifterOperand(second));
3977    } else if (instruction->IsOr()) {
3978      __ orr(out, first, ShifterOperand(second));
3979    } else {
3980      DCHECK(instruction->IsXor());
3981      __ eor(out, first, ShifterOperand(second));
3982    }
3983  } else {
3984    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3985    Location first = locations->InAt(0);
3986    Location second = locations->InAt(1);
3987    Location out = locations->Out();
3988    if (instruction->IsAnd()) {
3989      __ and_(out.AsRegisterPairLow<Register>(),
3990              first.AsRegisterPairLow<Register>(),
3991              ShifterOperand(second.AsRegisterPairLow<Register>()));
3992      __ and_(out.AsRegisterPairHigh<Register>(),
3993              first.AsRegisterPairHigh<Register>(),
3994              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3995    } else if (instruction->IsOr()) {
3996      __ orr(out.AsRegisterPairLow<Register>(),
3997             first.AsRegisterPairLow<Register>(),
3998             ShifterOperand(second.AsRegisterPairLow<Register>()));
3999      __ orr(out.AsRegisterPairHigh<Register>(),
4000             first.AsRegisterPairHigh<Register>(),
4001             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4002    } else {
4003      DCHECK(instruction->IsXor());
4004      __ eor(out.AsRegisterPairLow<Register>(),
4005             first.AsRegisterPairLow<Register>(),
4006             ShifterOperand(second.AsRegisterPairLow<Register>()));
4007      __ eor(out.AsRegisterPairHigh<Register>(),
4008             first.AsRegisterPairHigh<Register>(),
4009             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4010    }
4011  }
4012}
4013
// Emits the call sequence for a static or direct invoke. `temp` must be the
// ART method register; it is clobbered by the dex-cache lookup.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // A recursive call can branch straight to this method's own frame entry,
    // skipping the dex-cache lookup entirely.
    __ bl(GetFrameEntryLabel());
  }

  DCHECK(!IsLeafMethod());
}
4045
4046void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
4047  // Nothing to do, this should be removed during prepare for register allocator.
4048  UNUSED(instruction);
4049  LOG(FATAL) << "Unreachable";
4050}
4051
4052void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
4053  // Nothing to do, this should be removed during prepare for register allocator.
4054  UNUSED(instruction);
4055  LOG(FATAL) << "Unreachable";
4056}
4057
4058}  // namespace arm
4059}  // namespace art
4060