code_generator_arm.cc revision 27df758e2e7baebb6e3f393f9732fd0d064420c8
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Slot of the current method within the frame: stored at [SP, #0] by
// GenerateFrameEntry.
static constexpr int kCurrentMethodStackOffset = 0;

// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-saved core registers. PC appears here to mimic Quick's frame
// layout; at frame entry LR is pushed in PC's slot instead (see
// GenerateFrameEntry), so popping this set returns from the method.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
// Callee-saved VFP single-precision registers.
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;

// Within the slow-path classes below, `__` forwards to the assembler of the
// `codegen` parameter passed to EmitNativeCode (redefined after them).
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Int32 offset of the quick runtime entry point `x` (see
// QUICK_ENTRYPOINT_OFFSET), used by InvokeRuntime calls.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
58
// Slow path for an HNullCheck: calls the pThrowNullPointer runtime entry
// point. No registers are saved and no exit branch is emitted, unlike the
// non-throwing slow paths below.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  // The instruction that triggered this slow path.
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
74
// Slow path for an HDivZeroCheck: calls the pThrowDivZero runtime entry
// point. Like NullCheckSlowPathARM, nothing is saved and no exit branch is
// emitted after the call.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  // The instruction that triggered this slow path.
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
90
// Slow path for an HSuspendCheck: spills live registers, calls the
// pTestSuspend runtime entry point, restores, then branches either back to
// the fast path (via `return_label_`) or to an explicit successor block.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Live registers must survive the runtime call.
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  // Label to resume at; only meaningful when no successor block was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
125
// Slow path for an HBoundsCheck: passes the failing index and the array
// length to the pThrowArrayBounds runtime entry point. Throwing path, so no
// register save/restore and no exit branch.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  HBoundsCheck* const instruction_;
  // Location holding the index that failed the check.
  const Location index_location_;
  // Location holding the array length it was compared against.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
159
// Slow path resolving a class via the pInitializeType runtime entry point,
// or resolving and initializing it via pInitializeStaticStorage when
// `do_clinit` is set. Used by both HLoadClass and HClinitCheck.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime arguments: type index in the first argument register, the
    // current method in the second.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Pick the entry point depending on whether the class must also be
    // initialized.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      // The runtime call leaves its result in R0.
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
211
// Slow path for an HLoadString: resolves the string via the pResolveString
// runtime entry point and moves the R0 result into the output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not have been spilled by SaveLiveRegisters,
    // or the Move32 below would be clobbered by the restore.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime arguments: string index in the first argument register, the
    // current method in the second.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    // The runtime call leaves its result in R0.
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
240
241class TypeCheckSlowPathARM : public SlowPathCodeARM {
242 public:
243  TypeCheckSlowPathARM(HInstruction* instruction,
244                       Location class_to_check,
245                       Location object_class,
246                       uint32_t dex_pc)
247      : instruction_(instruction),
248        class_to_check_(class_to_check),
249        object_class_(object_class),
250        dex_pc_(dex_pc) {}
251
252  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
253    LocationSummary* locations = instruction_->GetLocations();
254    DCHECK(instruction_->IsCheckCast()
255           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
256
257    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
258    __ Bind(GetEntryLabel());
259    SaveLiveRegisters(codegen, locations);
260
261    // We're moving two locations to locations that could overlap, so we need a parallel
262    // move resolver.
263    InvokeRuntimeCallingConvention calling_convention;
264    codegen->EmitParallelMoves(
265        class_to_check_,
266        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
267        Primitive::kPrimNot,
268        object_class_,
269        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
270        Primitive::kPrimNot);
271
272    if (instruction_->IsInstanceOf()) {
273      arm_codegen->InvokeRuntime(
274          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
275      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
276    } else {
277      DCHECK(instruction_->IsCheckCast());
278      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
279    }
280
281    RestoreLiveRegisters(codegen, locations);
282    __ b(GetExitLabel());
283  }
284
285 private:
286  HInstruction* const instruction_;
287  const Location class_to_check_;
288  const Location object_class_;
289  uint32_t dex_pc_;
290
291  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
292};
293
// Slow path for an HDeoptimize: spills live registers and calls the
// pDeoptimize runtime entry point. Note that, unlike the other slow paths,
// nothing is restored and no exit branch is emitted after the call.
class DeoptimizationSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DeoptimizationSlowPathARM(HInstruction* instruction)
    : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM);
};
313
#undef __
// From here on, `__` forwards to this code generator's own assembler (the
// slow paths above used the `codegen` parameter instead). The original had
// a duplicated, redundant `#undef __` here.
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
318
319inline Condition ARMCondition(IfCondition cond) {
320  switch (cond) {
321    case kCondEQ: return EQ;
322    case kCondNE: return NE;
323    case kCondLT: return LT;
324    case kCondLE: return LE;
325    case kCondGT: return GT;
326    case kCondGE: return GE;
327    default:
328      LOG(FATAL) << "Unknown if condition";
329  }
330  return EQ;        // Unreachable.
331}
332
333inline Condition ARMOppositeCondition(IfCondition cond) {
334  switch (cond) {
335    case kCondEQ: return NE;
336    case kCondNE: return EQ;
337    case kCondLT: return GE;
338    case kCondLE: return GT;
339    case kCondGT: return LE;
340    case kCondGE: return LT;
341    default:
342      LOG(FATAL) << "Unknown if condition";
343  }
344  return EQ;        // Unreachable.
345}
346
347void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
348  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
349}
350
351void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
352  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
353}
354
// Spills core register `reg_id` to [SP, #stack_index]. Returns the number of
// bytes consumed (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads core register `reg_id` from [SP, #stack_index].
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Spills single-precision register `reg_id` to [SP, #stack_index].
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads single-precision register `reg_id` from [SP, #stack_index].
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
374
// Constructs the ARM code generator, handing the register counts and the
// callee-save masks (computed from the file-level arrays) to the base
// CodeGenerator.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
396
397Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
398  switch (type) {
399    case Primitive::kPrimLong: {
400      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
401      ArmManagedRegister pair =
402          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
403      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
404      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
405
406      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
407      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
408      UpdateBlockedPairRegisters();
409      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
410    }
411
412    case Primitive::kPrimByte:
413    case Primitive::kPrimBoolean:
414    case Primitive::kPrimChar:
415    case Primitive::kPrimShort:
416    case Primitive::kPrimInt:
417    case Primitive::kPrimNot: {
418      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
419      // Block all register pairs that contain `reg`.
420      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
421        ArmManagedRegister current =
422            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
423        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
424          blocked_register_pairs_[i] = true;
425        }
426      }
427      return Location::RegisterLocation(reg);
428    }
429
430    case Primitive::kPrimFloat: {
431      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
432      return Location::FpuRegisterLocation(reg);
433    }
434
435    case Primitive::kPrimDouble: {
436      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
437      DCHECK_EQ(reg % 2, 0);
438      return Location::FpuRegisterPairLocation(reg, reg + 1);
439    }
440
441    case Primitive::kPrimVoid:
442      LOG(FATAL) << "Unreachable type " << type;
443  }
444
445  return Location();
446}
447
448void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
449  // Don't allocate the dalvik style register pair passing.
450  blocked_register_pairs_[R1_R2] = true;
451
452  // Stack register, LR and PC are always reserved.
453  blocked_core_registers_[SP] = true;
454  blocked_core_registers_[LR] = true;
455  blocked_core_registers_[PC] = true;
456
457  // Reserve thread register.
458  blocked_core_registers_[TR] = true;
459
460  // Reserve temp register.
461  blocked_core_registers_[IP] = true;
462
463  if (is_baseline) {
464    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
465      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
466    }
467
468    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
469
470    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
471      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
472    }
473  }
474
475  UpdateBlockedPairRegisters();
476}
477
478void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
479  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
480    ArmManagedRegister current =
481        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
482    if (blocked_core_registers_[current.AsRegisterPairLow()]
483        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
484      blocked_register_pairs_[i] = true;
485    }
486  }
487}
488
// Visitor that emits native code per HInstruction, sharing the assembler
// owned by `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
493
// Zero-based index of the lowest set bit in `mask`.
static uint32_t LeastSignificantBit(uint32_t mask) {
  // ffs numbers bits starting at 1, so subtract one to get a 0-based index.
  const int one_based = ffs(mask);
  return one_based - 1;
}
498
// Computes core_spill_mask_ and fpu_spill_mask_ from the registers the
// allocator actually used, intersected with the callee-save sets.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every bit between the lowest and highest set bits so the mask
    // describes one contiguous run of registers.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
519
// Maps an ARM core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}

// Maps an ARM single-precision register to its DWARF register number.
static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}
527
// Emits the method prologue: optional implicit stack overflow probe,
// callee-save pushes (with CFI bookkeeping), frame allocation, and storing
// the current method at [SP, #0] (kCurrentMethodStackOffset).
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small enough frames can skip the overflow probe.
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    // Nothing to push or allocate.
    return;
  }

  if (!skip_overflow_check) {
    // Implicit check: load from [SP - reserved bytes]; record the PC so the
    // runtime can attribute a fault at this point to this method.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  uint32_t push_mask = (core_spill_mask_ & (~(1 << PC))) | 1 << LR;
  __ PushList(push_mask);
  __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(push_mask));
  __ cfi().RelOffsetForMany(DWARFReg(R0), 0, push_mask, kArmWordSize);
  if (fpu_spill_mask_ != 0) {
    // vpush saves a contiguous range starting at the lowest set bit
    // (ComputeSpillMask fills the mask into one contiguous run).
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
    __ cfi().AdjustCFAOffset(kArmWordSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RelOffsetForMany(DWARFReg(S0), 0, fpu_spill_mask_, kArmWordSize);
  }
  // Allocate the part of the frame not covered by the register spills.
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, -adjust);
  __ cfi().AdjustCFAOffset(adjust);
  // Store R0 into the current-method slot at the bottom of the frame.
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
561
// Emits the method epilogue, mirroring GenerateFrameEntry: deallocate the
// frame, pop the FPU spills, then pop the core spills. Since
// core_spill_mask_ contains PC (where LR was pushed at entry), the final
// PopList also performs the return.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    // Nothing was pushed; a plain branch to LR returns.
    __ bx(LR);
    return;
  }
  __ cfi().RememberState();
  int adjust = GetFrameSize() - FrameEntrySpillSize();
  __ AddConstant(SP, adjust);
  __ cfi().AdjustCFAOffset(-adjust);
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
    __ cfi().AdjustCFAOffset(-kArmPointerSize * POPCOUNT(fpu_spill_mask_));
    __ cfi().RestoreMany(DWARFReg(SRegister(0)), fpu_spill_mask_);
  }
  // This pop loads the saved return address into PC and exits the method.
  __ PopList(core_spill_mask_);
  __ cfi().RestoreState();
  // Code may still follow this epilogue; re-establish the frame's CFA.
  __ cfi().DefCFAOffset(GetFrameSize());
}
581
582void CodeGeneratorARM::Bind(HBasicBlock* block) {
583  __ Bind(GetLabelOf(block));
584}
585
586Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
587  switch (load->GetType()) {
588    case Primitive::kPrimLong:
589    case Primitive::kPrimDouble:
590      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
591
592    case Primitive::kPrimInt:
593    case Primitive::kPrimNot:
594    case Primitive::kPrimFloat:
595      return Location::StackSlot(GetStackSlot(load->GetLocal()));
596
597    case Primitive::kPrimBoolean:
598    case Primitive::kPrimByte:
599    case Primitive::kPrimChar:
600    case Primitive::kPrimShort:
601    case Primitive::kPrimVoid:
602      LOG(FATAL) << "Unexpected type " << load->GetType();
603      UNREACHABLE();
604  }
605
606  LOG(FATAL) << "Unreachable";
607  UNREACHABLE();
608}
609
// Computes the location of the next method argument of type `type` under the
// managed calling convention, advancing the gp/fpu/stack cursors. The stack
// cursor advances for every argument — including register arguments — so
// each argument also has a reserved slot on the stack.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // A long gets two consecutive registers.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        // Not enough registers left: the long goes on the stack.
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        // At an even boundary, jump past any registers already consumed by
        // doubles.
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // A double starts at an even S register, past any singles already used.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
684
685Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
686  switch (type) {
687    case Primitive::kPrimBoolean:
688    case Primitive::kPrimByte:
689    case Primitive::kPrimChar:
690    case Primitive::kPrimShort:
691    case Primitive::kPrimInt:
692    case Primitive::kPrimNot: {
693      return Location::RegisterLocation(R0);
694    }
695
696    case Primitive::kPrimFloat: {
697      return Location::FpuRegisterLocation(S0);
698    }
699
700    case Primitive::kPrimLong: {
701      return Location::RegisterPairLocation(R0, R1);
702    }
703
704    case Primitive::kPrimDouble: {
705      return Location::FpuRegisterPairLocation(S0, S1);
706    }
707
708    case Primitive::kPrimVoid:
709      return Location();
710  }
711  UNREACHABLE();
712}
713
// Emits a 32-bit move between `source` and `destination`, each of which may
// be a core register, an S register or a stack slot, in any combination. IP
// serves as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      // Stack slot -> core register.
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      // Stack slot -> S register.
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack -> stack: bounce through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
747
// Emits a 64-bit move between `source` and `destination` (core register
// pairs, FPU register pairs and double stack slots). Combinations that never
// occur in practice are left UNIMPLEMENTED.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The halves of the two pairs could overlap, so resolve the two word
      // moves through the parallel move resolver.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Primitive::kPrimInt,
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
          Primitive::kPrimInt);
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      DCHECK(ExpectedPairLayout(destination));
      // An even-aligned consecutive pair can be loaded in one word-pair load.
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 does not satisfy ExpectedPairLayout (low register must be
        // even), so store the two words individually.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack -> stack: handled as two word moves via the parallel move
      // resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Primitive::kPrimInt,
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)),
          Primitive::kPrimInt);
    }
  }
}
805
// Materializes the value produced by (or associated with) `instruction` into
// `location`. The value may come from a constant output, a local's stack
// slot, a temporary, or the instruction's own output location. `move_for` is
// the instruction on whose behalf the move is performed; it is only used for
// a debug-mode scheduling check in the final branch.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do if the value already lives in the requested location.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      // 32-bit constant (null is encoded as 0): load it directly into a core
      // register, or stage it through IP when targeting a stack slot.
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store the two 32-bit halves one word at a time, staging through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Locals live in stack slots; copy 32 or 64 bits depending on the type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    // Temporaries are spilled to the stack; the slot width picks the move.
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: move from the instruction's output location. The value
    // must still be live, i.e. `instruction` was just generated (or only a
    // temporary was emitted in between).
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
889
// Calls the quick runtime entry point stored at `entry_point_offset` in the
// current Thread (TR), then records a stack map at `dex_pc` for
// `instruction` (attributed to `slow_path` when non-null). Clobbers LR.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc, slow_path);
  // Sanity check: a leaf method must not call into the runtime, except for
  // the instruction kinds listed below (their checks are expected to use
  // runtime support) or instructions whose locations declare a call.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
904
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  // An unconditional branch has no operands and produces no value.
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  // On a loop back edge, route the jump through the loop's suspend check,
  // which then continues to the loop header (`successor`).
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The entry block's suspend check (method entry) is emitted here, just
  // before leaving the block.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Elide the branch when the successor is emitted immediately after this
  // block in the code layout.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
930
void LocationsBuilderARM::VisitExit(HExit* exit) {
  // The exit block needs no operands and generates no code.
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  // Nothing to emit for the exit block.
  UNUSED(exit);
}
938
// Emits the test-and-branch sequence for `instruction` (HIf or HDeoptimize),
// whose first input is the condition:
//  - `true_target`: taken when the condition is true;
//  - `false_target`: taken when it is false (null means fall-through);
//  - `always_true_target`: used when the condition is statically true (null
//    when the true successor is the next block, so no jump is needed).
void InstructionCodeGeneratorARM::GenerateTestAndBranch(HInstruction* instruction,
                                                        Label* true_target,
                                                        Label* false_target,
                                                        Label* always_true_target) {
  HInstruction* cond = instruction->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (always_true_target != nullptr) {
        __ b(always_true_target);
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0.
      DCHECK(instruction->GetLocations()->InAt(0).IsRegister());
      __ cmp(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(true_target, NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Use the constant as an immediate operand when it is encodable;
        // otherwise stage it through the scratch register IP.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(true_target, ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false case unless it falls through (null target).
  if (false_target != nullptr) {
    __ b(false_target);
  }
}
990
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // A register is only needed when the condition has been materialized into
  // a boolean value; otherwise the comparison inputs are consumed directly
  // (see GenerateTestAndBranch).
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
999
1000void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
1001  Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1002  Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1003  Label* always_true_target = true_target;
1004  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1005                                if_instr->IfTrueSuccessor())) {
1006    always_true_target = nullptr;
1007  }
1008  if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
1009                                if_instr->IfFalseSuccessor())) {
1010    false_target = nullptr;
1011  }
1012  GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
1013}
1014
void LocationsBuilderARM::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Deoptimization enters the runtime through a slow path.
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  HInstruction* cond = deoptimize->InputAt(0);
  DCHECK(cond->IsCondition());
  // A register is only needed for a materialized condition; otherwise the
  // comparison inputs are consumed directly (see GenerateTestAndBranch).
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
1024
1025void InstructionCodeGeneratorARM::VisitDeoptimize(HDeoptimize* deoptimize) {
1026  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena())
1027      DeoptimizationSlowPathARM(deoptimize);
1028  codegen_->AddSlowPath(slow_path);
1029  Label* slow_path_entry = slow_path->GetEntryLabel();
1030  GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
1031}
1032
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // An output register is only needed when the boolean result is
  // materialized; otherwise users consume the condition flags directly.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
1042
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // When not materialized, the comparison is emitted by the instruction
  // using the condition (see GenerateTestAndBranch).
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Use the constant as an immediate operand when it is encodable;
    // otherwise stage it through the scratch register IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Materialize the flags into 0/1 with an IT (if-then-else) block:
  // out = 1 when the condition holds, 0 otherwise.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1068
// All comparison instructions (HEqual, HNotEqual, HLessThan, ...) share the
// generic HCondition handling; each visitor simply forwards to
// VisitCondition.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1116
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  // Locals are managed as stack slots by the code generator; no locations.
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals must only appear in the entry block; nothing to emit.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1133
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  // Constrain the stored value (input 1) to the local's own stack slot, so
  // the move into the slot is produced when the input location is resolved;
  // the code-generation visitor below then has nothing to emit.
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}

void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input is already constrained to the local's slot.
  UNUSED(store);
}
1161
// Constants are not materialized eagerly: each constant's output is a
// constant location, and the actual value is emitted at the use site.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1216
void LocationsBuilderARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A memory barrier has no operands and no output.
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Constrain the returned value to the calling convention's return
  // location for its type, so no move is needed before the frame exit.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  // The return value is already in its ABI location; just exit the frame.
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1244
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Try to match the invoke to an intrinsic first; if that succeeds the
  // intrinsic sets up its own locations.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1254
// Loads the reference to the method being compiled into `reg`. The current
// method is spilled at the bottom of the frame (kCurrentMethodStackOffset).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1259
1260static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1261  if (invoke->GetLocations()->Intrinsified()) {
1262    IntrinsicCodeGeneratorARM intrinsic(codegen);
1263    intrinsic.Dispatch(invoke);
1264    return true;
1265  }
1266  return false;
1267}
1268
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // An intrinsified invoke emits its own code sequence instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1279
1280void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1281  LocationSummary* locations =
1282      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1283  locations->AddTemp(Location::RegisterLocation(R0));
1284
1285  InvokeDexCallingConventionVisitor calling_convention_visitor;
1286  for (size_t i = 0; i < invoke->InputCount(); i++) {
1287    HInstruction* input = invoke->InputAt(i);
1288    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1289  }
1290
1291  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1292}
1293
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Try to match the invoke to an intrinsic first; if that succeeds the
  // intrinsic sets up its own locations.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // An intrinsified invoke emits its own code sequence instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of this method's entry in the class' embedded vtable.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1334
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}

void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of this method's slot in the class' embedded interface method
  // table (IMT); the index wraps around at kImtSize, hence possible
  // conflicts (see the TODO above).
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument (the target method's dex method index, in R12).
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1373
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The generated long-negation code writes out.lo (and out.hi) before
      // it reads in.hi (see VisitNeg below), so the output pair must not
      // overlap the input pair.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1399
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // -x is computed as 0 - x (reverse subtract).
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      // (SBC with identical operands yields Rn - Rn - !C, i.e. 0 when the
      // low-word subtraction produced no borrow and -1 when it did.)
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      // Doubles are held in adjacent S-register pairs; view them as a D reg.
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1446
1447void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1448  Primitive::Type result_type = conversion->GetResultType();
1449  Primitive::Type input_type = conversion->GetInputType();
1450  DCHECK_NE(result_type, input_type);
1451
1452  // The float-to-long and double-to-long type conversions rely on a
1453  // call to the runtime.
1454  LocationSummary::CallKind call_kind =
1455      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1456       && result_type == Primitive::kPrimLong)
1457      ? LocationSummary::kCall
1458      : LocationSummary::kNoCall;
1459  LocationSummary* locations =
1460      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1461
1462  // The Java language does not allow treating boolean as an integral type but
1463  // our bit representation makes it safe.
1464
1465  switch (result_type) {
1466    case Primitive::kPrimByte:
1467      switch (input_type) {
1468        case Primitive::kPrimBoolean:
1469          // Boolean input is a result of code transformations.
1470        case Primitive::kPrimShort:
1471        case Primitive::kPrimInt:
1472        case Primitive::kPrimChar:
1473          // Processing a Dex `int-to-byte' instruction.
1474          locations->SetInAt(0, Location::RequiresRegister());
1475          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1476          break;
1477
1478        default:
1479          LOG(FATAL) << "Unexpected type conversion from " << input_type
1480                     << " to " << result_type;
1481      }
1482      break;
1483
1484    case Primitive::kPrimShort:
1485      switch (input_type) {
1486        case Primitive::kPrimBoolean:
1487          // Boolean input is a result of code transformations.
1488        case Primitive::kPrimByte:
1489        case Primitive::kPrimInt:
1490        case Primitive::kPrimChar:
1491          // Processing a Dex `int-to-short' instruction.
1492          locations->SetInAt(0, Location::RequiresRegister());
1493          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1494          break;
1495
1496        default:
1497          LOG(FATAL) << "Unexpected type conversion from " << input_type
1498                     << " to " << result_type;
1499      }
1500      break;
1501
1502    case Primitive::kPrimInt:
1503      switch (input_type) {
1504        case Primitive::kPrimLong:
1505          // Processing a Dex `long-to-int' instruction.
1506          locations->SetInAt(0, Location::Any());
1507          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1508          break;
1509
1510        case Primitive::kPrimFloat:
1511          // Processing a Dex `float-to-int' instruction.
1512          locations->SetInAt(0, Location::RequiresFpuRegister());
1513          locations->SetOut(Location::RequiresRegister());
1514          locations->AddTemp(Location::RequiresFpuRegister());
1515          break;
1516
1517        case Primitive::kPrimDouble:
1518          // Processing a Dex `double-to-int' instruction.
1519          locations->SetInAt(0, Location::RequiresFpuRegister());
1520          locations->SetOut(Location::RequiresRegister());
1521          locations->AddTemp(Location::RequiresFpuRegister());
1522          break;
1523
1524        default:
1525          LOG(FATAL) << "Unexpected type conversion from " << input_type
1526                     << " to " << result_type;
1527      }
1528      break;
1529
1530    case Primitive::kPrimLong:
1531      switch (input_type) {
1532        case Primitive::kPrimBoolean:
1533          // Boolean input is a result of code transformations.
1534        case Primitive::kPrimByte:
1535        case Primitive::kPrimShort:
1536        case Primitive::kPrimInt:
1537        case Primitive::kPrimChar:
1538          // Processing a Dex `int-to-long' instruction.
1539          locations->SetInAt(0, Location::RequiresRegister());
1540          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1541          break;
1542
1543        case Primitive::kPrimFloat: {
1544          // Processing a Dex `float-to-long' instruction.
1545          InvokeRuntimeCallingConvention calling_convention;
1546          locations->SetInAt(0, Location::FpuRegisterLocation(
1547              calling_convention.GetFpuRegisterAt(0)));
1548          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1549          break;
1550        }
1551
1552        case Primitive::kPrimDouble: {
1553          // Processing a Dex `double-to-long' instruction.
1554          InvokeRuntimeCallingConvention calling_convention;
1555          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1556              calling_convention.GetFpuRegisterAt(0),
1557              calling_convention.GetFpuRegisterAt(1)));
1558          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1559          break;
1560        }
1561
1562        default:
1563          LOG(FATAL) << "Unexpected type conversion from " << input_type
1564                     << " to " << result_type;
1565      }
1566      break;
1567
1568    case Primitive::kPrimChar:
1569      switch (input_type) {
1570        case Primitive::kPrimBoolean:
1571          // Boolean input is a result of code transformations.
1572        case Primitive::kPrimByte:
1573        case Primitive::kPrimShort:
1574        case Primitive::kPrimInt:
1575          // Processing a Dex `int-to-char' instruction.
1576          locations->SetInAt(0, Location::RequiresRegister());
1577          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1578          break;
1579
1580        default:
1581          LOG(FATAL) << "Unexpected type conversion from " << input_type
1582                     << " to " << result_type;
1583      }
1584      break;
1585
1586    case Primitive::kPrimFloat:
1587      switch (input_type) {
1588        case Primitive::kPrimBoolean:
1589          // Boolean input is a result of code transformations.
1590        case Primitive::kPrimByte:
1591        case Primitive::kPrimShort:
1592        case Primitive::kPrimInt:
1593        case Primitive::kPrimChar:
1594          // Processing a Dex `int-to-float' instruction.
1595          locations->SetInAt(0, Location::RequiresRegister());
1596          locations->SetOut(Location::RequiresFpuRegister());
1597          break;
1598
1599        case Primitive::kPrimLong:
1600          // Processing a Dex `long-to-float' instruction.
1601          locations->SetInAt(0, Location::RequiresRegister());
1602          locations->SetOut(Location::RequiresFpuRegister());
1603          locations->AddTemp(Location::RequiresRegister());
1604          locations->AddTemp(Location::RequiresRegister());
1605          locations->AddTemp(Location::RequiresFpuRegister());
1606          locations->AddTemp(Location::RequiresFpuRegister());
1607          break;
1608
1609        case Primitive::kPrimDouble:
1610          // Processing a Dex `double-to-float' instruction.
1611          locations->SetInAt(0, Location::RequiresFpuRegister());
1612          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1613          break;
1614
1615        default:
1616          LOG(FATAL) << "Unexpected type conversion from " << input_type
1617                     << " to " << result_type;
1618      };
1619      break;
1620
1621    case Primitive::kPrimDouble:
1622      switch (input_type) {
1623        case Primitive::kPrimBoolean:
1624          // Boolean input is a result of code transformations.
1625        case Primitive::kPrimByte:
1626        case Primitive::kPrimShort:
1627        case Primitive::kPrimInt:
1628        case Primitive::kPrimChar:
1629          // Processing a Dex `int-to-double' instruction.
1630          locations->SetInAt(0, Location::RequiresRegister());
1631          locations->SetOut(Location::RequiresFpuRegister());
1632          break;
1633
1634        case Primitive::kPrimLong:
1635          // Processing a Dex `long-to-double' instruction.
1636          locations->SetInAt(0, Location::RequiresRegister());
1637          locations->SetOut(Location::RequiresFpuRegister());
1638          locations->AddTemp(Location::RequiresRegister());
1639          locations->AddTemp(Location::RequiresRegister());
1640          locations->AddTemp(Location::RequiresFpuRegister());
1641          break;
1642
1643        case Primitive::kPrimFloat:
1644          // Processing a Dex `float-to-double' instruction.
1645          locations->SetInAt(0, Location::RequiresFpuRegister());
1646          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1647          break;
1648
1649        default:
1650          LOG(FATAL) << "Unexpected type conversion from " << input_type
1651                     << " to " << result_type;
1652      };
1653      break;
1654
1655    default:
1656      LOG(FATAL) << "Unexpected type conversion from " << input_type
1657                 << " to " << result_type;
1658  }
1659}
1660
// Emits code for an HTypeConversion, dispatching on (result type, input type).
// Register and temp locations were chosen by the matching
// LocationsBuilderARM::VisitTypeConversion; any (input, result) pair not
// handled below is a compiler bug (LOG(FATAL)).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // sbfx sign-extends the low 8 bits of the input into the output.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // sbfx sign-extends the low 16 bits of the input into the output.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            // Truncation: keep only the low 32 bits of the pair.
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            // The low word of the long lives at the slot's base offset.
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            // Fold the truncation at compile time.
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FPU temp, then transfer the result to a core register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // Convert in an FPU temp pair, then transfer the 32-bit result
          // (which lands in the low S register) to a core register.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // Done in the runtime via the pF2l entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          // Done in the runtime via the pD2l entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // ubfx zero-extends the low 16 bits (char is unsigned 16-bit).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          // Transfer to the output S register, then convert in place.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          // Transfer to the low S register of the output pair, then convert
          // in place into the overlapping D register.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same scheme as long-to-float above, but the result stays a double
          // and one fewer FPU temp is needed (the output D register is used).
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1934
1935void LocationsBuilderARM::VisitAdd(HAdd* add) {
1936  LocationSummary* locations =
1937      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1938  switch (add->GetResultType()) {
1939    case Primitive::kPrimInt: {
1940      locations->SetInAt(0, Location::RequiresRegister());
1941      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1942      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1943      break;
1944    }
1945
1946    case Primitive::kPrimLong: {
1947      locations->SetInAt(0, Location::RequiresRegister());
1948      locations->SetInAt(1, Location::RequiresRegister());
1949      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1950      break;
1951    }
1952
1953    case Primitive::kPrimFloat:
1954    case Primitive::kPrimDouble: {
1955      locations->SetInAt(0, Location::RequiresFpuRegister());
1956      locations->SetInAt(1, Location::RequiresFpuRegister());
1957      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1958      break;
1959    }
1960
1961    default:
1962      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1963  }
1964}
1965
// Emits code for an HAdd.  Locations were set up by
// LocationsBuilderARM::VisitAdd: int allows a constant second operand, long
// uses register pairs, and float/double use VFP registers.
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Constant second operand: AddConstant materializes the immediate
        // as needed.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // 64-bit add: low words with adds (sets carry), high words with adc
      // (consumes carry).
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat:
      __ vadds(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      // Double values live in S register pairs; FromLowSToD maps the pair
      // to the overlapping D register.
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
2011
2012void LocationsBuilderARM::VisitSub(HSub* sub) {
2013  LocationSummary* locations =
2014      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
2015  switch (sub->GetResultType()) {
2016    case Primitive::kPrimInt: {
2017      locations->SetInAt(0, Location::RequiresRegister());
2018      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
2019      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2020      break;
2021    }
2022
2023    case Primitive::kPrimLong: {
2024      locations->SetInAt(0, Location::RequiresRegister());
2025      locations->SetInAt(1, Location::RequiresRegister());
2026      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2027      break;
2028    }
2029    case Primitive::kPrimFloat:
2030    case Primitive::kPrimDouble: {
2031      locations->SetInAt(0, Location::RequiresFpuRegister());
2032      locations->SetInAt(1, Location::RequiresFpuRegister());
2033      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2034      break;
2035    }
2036    default:
2037      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
2038  }
2039}
2040
// Emits code for an HSub.  Locations were set up by
// LocationsBuilderARM::VisitSub: int allows a constant second operand, long
// uses register pairs, and float/double use VFP registers.
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Subtract a constant by adding its negation.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // 64-bit subtract: low words with subs (sets borrow via carry flag),
      // high words with sbc (consumes it).
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Double values live in S register pairs; FromLowSToD maps the pair
      // to the overlapping D register.
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
2090
2091void LocationsBuilderARM::VisitMul(HMul* mul) {
2092  LocationSummary* locations =
2093      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2094  switch (mul->GetResultType()) {
2095    case Primitive::kPrimInt:
2096    case Primitive::kPrimLong:  {
2097      locations->SetInAt(0, Location::RequiresRegister());
2098      locations->SetInAt(1, Location::RequiresRegister());
2099      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2100      break;
2101    }
2102
2103    case Primitive::kPrimFloat:
2104    case Primitive::kPrimDouble: {
2105      locations->SetInAt(0, Location::RequiresFpuRegister());
2106      locations->SetInAt(1, Location::RequiresFpuRegister());
2107      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2108      break;
2109    }
2110
2111    default:
2112      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2113  }
2114}
2115
// Emits code for an HMul.  The 64-bit case builds the 128->64-bit truncated
// product from 32-bit multiplies, using IP as a scratch register; the
// instruction order below is significant (IP is both written and consumed).
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Double values live in S register pairs; FromLowSToD maps the pair
      // to the overlapping D register.
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2177
2178void LocationsBuilderARM::VisitDiv(HDiv* div) {
2179  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2180  if (div->GetResultType() == Primitive::kPrimLong) {
2181    // pLdiv runtime call.
2182    call_kind = LocationSummary::kCall;
2183  } else if (div->GetResultType() == Primitive::kPrimInt &&
2184             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2185    // pIdivmod runtime call.
2186    call_kind = LocationSummary::kCall;
2187  }
2188
2189  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2190
2191  switch (div->GetResultType()) {
2192    case Primitive::kPrimInt: {
2193      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2194        locations->SetInAt(0, Location::RequiresRegister());
2195        locations->SetInAt(1, Location::RequiresRegister());
2196        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2197      } else {
2198        InvokeRuntimeCallingConvention calling_convention;
2199        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2200        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2201        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2202        //       we only need the former.
2203        locations->SetOut(Location::RegisterLocation(R0));
2204      }
2205      break;
2206    }
2207    case Primitive::kPrimLong: {
2208      InvokeRuntimeCallingConvention calling_convention;
2209      locations->SetInAt(0, Location::RegisterPairLocation(
2210          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2211      locations->SetInAt(1, Location::RegisterPairLocation(
2212          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2213      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2214      break;
2215    }
2216    case Primitive::kPrimFloat:
2217    case Primitive::kPrimDouble: {
2218      locations->SetInAt(0, Location::RequiresFpuRegister());
2219      locations->SetInAt(1, Location::RequiresFpuRegister());
2220      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2221      break;
2222    }
2223
2224    default:
2225      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2226  }
2227}
2228
// Emits code for an HDiv.  Int division uses sdiv when the CPU has it,
// otherwise the pIdivmod runtime call; long division always calls pLdiv;
// float/double use vdivs/vdivd.  The DCHECKs verify the calling-convention
// registers chosen by LocationsBuilderARM::VisitDiv.
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        __ sdiv(out.AsRegister<Register>(),
                first.AsRegister<Register>(),
                second.AsRegister<Register>());
      } else {
        // Runtime call: operands must already sit in the argument registers,
        // and the quotient comes back in R0.
        InvokeRuntimeCallingConvention calling_convention;
        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
        DCHECK_EQ(R0, out.AsRegister<Register>());

        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), div, div->GetDexPc(), nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Runtime call: both 64-bit operands in argument register pairs,
      // 64-bit quotient in R0:R1.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Double values live in S register pairs; FromLowSToD maps the pair
      // to the overlapping D register.
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2283
2284void LocationsBuilderARM::VisitRem(HRem* rem) {
2285  Primitive::Type type = rem->GetResultType();
2286
2287  // Most remainders are implemented in the runtime.
2288  LocationSummary::CallKind call_kind = LocationSummary::kCall;
2289  if (rem->GetResultType() == Primitive::kPrimInt &&
2290      codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2291    // Have hardware divide instruction for int, do it with three instructions.
2292    call_kind = LocationSummary::kNoCall;
2293  }
2294
2295  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2296
2297  switch (type) {
2298    case Primitive::kPrimInt: {
2299      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
2300        locations->SetInAt(0, Location::RequiresRegister());
2301        locations->SetInAt(1, Location::RequiresRegister());
2302        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2303        locations->AddTemp(Location::RequiresRegister());
2304      } else {
2305        InvokeRuntimeCallingConvention calling_convention;
2306        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2307        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2308        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
2309        //       we only need the latter.
2310        locations->SetOut(Location::RegisterLocation(R1));
2311      }
2312      break;
2313    }
2314    case Primitive::kPrimLong: {
2315      InvokeRuntimeCallingConvention calling_convention;
2316      locations->SetInAt(0, Location::RegisterPairLocation(
2317          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2318      locations->SetInAt(1, Location::RegisterPairLocation(
2319          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2320      // The runtime helper puts the output in R2,R3.
2321      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2322      break;
2323    }
2324    case Primitive::kPrimFloat: {
2325      InvokeRuntimeCallingConvention calling_convention;
2326      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2327      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2328      locations->SetOut(Location::FpuRegisterLocation(S0));
2329      break;
2330    }
2331
2332    case Primitive::kPrimDouble: {
2333      InvokeRuntimeCallingConvention calling_convention;
2334      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2335          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2336      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2337          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2338      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2339      break;
2340    }
2341
2342    default:
2343      LOG(FATAL) << "Unexpected rem type " << type;
2344  }
2345}
2346
// Emits code for an HRem.  With a hardware divider, int remainder is computed
// inline as reg1 - (reg1 / reg2) * reg2; all other cases call into the
// runtime (pIdivmod / pLmod / pFmodf / pFmod), whose operand and result
// registers were fixed by LocationsBuilderARM::VisitRem.
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
        Register reg1 = first.AsRegister<Register>();
        Register reg2 = second.AsRegister<Register>();
        Register temp = locations->GetTemp(0).AsRegister<Register>();

        // temp = reg1 / reg2  (integer division)
        // temp = temp * reg2
        // dest = reg1 - temp
        __ sdiv(temp, reg1, reg2);
        __ mul(temp, temp, reg2);
        __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      } else {
        // Runtime call; the remainder comes back in R1.
        InvokeRuntimeCallingConvention calling_convention;
        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
        DCHECK_EQ(R1, out.AsRegister<Register>());

        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), rem, rem->GetDexPc(), nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc(), nullptr);
      break;
    }

    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc(), nullptr);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
2397
2398void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2399  LocationSummary* locations =
2400      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2401  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2402  if (instruction->HasUses()) {
2403    locations->SetOut(Location::SameAsFirstInput());
2404  }
2405}
2406
2407void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2408  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2409  codegen_->AddSlowPath(slow_path);
2410
2411  LocationSummary* locations = instruction->GetLocations();
2412  Location value = locations->InAt(0);
2413
2414  switch (instruction->GetType()) {
2415    case Primitive::kPrimInt: {
2416      if (value.IsRegister()) {
2417        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2418        __ b(slow_path->GetEntryLabel(), EQ);
2419      } else {
2420        DCHECK(value.IsConstant()) << value;
2421        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2422          __ b(slow_path->GetEntryLabel());
2423        }
2424      }
2425      break;
2426    }
2427    case Primitive::kPrimLong: {
2428      if (value.IsRegisterPair()) {
2429        __ orrs(IP,
2430                value.AsRegisterPairLow<Register>(),
2431                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2432        __ b(slow_path->GetEntryLabel(), EQ);
2433      } else {
2434        DCHECK(value.IsConstant()) << value;
2435        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2436          __ b(slow_path->GetEntryLabel());
2437        }
2438      }
2439      break;
2440    default:
2441      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2442    }
2443  }
2444}
2445
2446void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2447  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2448
2449  LocationSummary* locations =
2450      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2451
2452  switch (op->GetResultType()) {
2453    case Primitive::kPrimInt: {
2454      locations->SetInAt(0, Location::RequiresRegister());
2455      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2456      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2457      break;
2458    }
2459    case Primitive::kPrimLong: {
2460      locations->SetInAt(0, Location::RequiresRegister());
2461      locations->SetInAt(1, Location::RequiresRegister());
2462      locations->AddTemp(Location::RequiresRegister());
2463      locations->SetOut(Location::RequiresRegister());
2464      break;
2465    }
2466    default:
2467      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2468  }
2469}
2470
2471void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2472  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2473
2474  LocationSummary* locations = op->GetLocations();
2475  Location out = locations->Out();
2476  Location first = locations->InAt(0);
2477  Location second = locations->InAt(1);
2478
2479  Primitive::Type type = op->GetResultType();
2480  switch (type) {
2481    case Primitive::kPrimInt: {
2482      Register out_reg = out.AsRegister<Register>();
2483      Register first_reg = first.AsRegister<Register>();
2484      // Arm doesn't mask the shift count so we need to do it ourselves.
2485      if (second.IsRegister()) {
2486        Register second_reg = second.AsRegister<Register>();
2487        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2488        if (op->IsShl()) {
2489          __ Lsl(out_reg, first_reg, second_reg);
2490        } else if (op->IsShr()) {
2491          __ Asr(out_reg, first_reg, second_reg);
2492        } else {
2493          __ Lsr(out_reg, first_reg, second_reg);
2494        }
2495      } else {
2496        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2497        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2498        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
2499          __ Mov(out_reg, first_reg);
2500        } else if (op->IsShl()) {
2501          __ Lsl(out_reg, first_reg, shift_value);
2502        } else if (op->IsShr()) {
2503          __ Asr(out_reg, first_reg, shift_value);
2504        } else {
2505          __ Lsr(out_reg, first_reg, shift_value);
2506        }
2507      }
2508      break;
2509    }
2510    case Primitive::kPrimLong: {
2511      Register o_h = out.AsRegisterPairHigh<Register>();
2512      Register o_l = out.AsRegisterPairLow<Register>();
2513
2514      Register temp = locations->GetTemp(0).AsRegister<Register>();
2515
2516      Register high = first.AsRegisterPairHigh<Register>();
2517      Register low = first.AsRegisterPairLow<Register>();
2518
2519      Register second_reg = second.AsRegister<Register>();
2520
2521      if (op->IsShl()) {
2522        // Shift the high part
2523        __ and_(second_reg, second_reg, ShifterOperand(63));
2524        __ Lsl(o_h, high, second_reg);
2525        // Shift the low part and `or` what overflew on the high part
2526        __ rsb(temp, second_reg, ShifterOperand(32));
2527        __ Lsr(temp, low, temp);
2528        __ orr(o_h, o_h, ShifterOperand(temp));
2529        // If the shift is > 32 bits, override the high part
2530        __ subs(temp, second_reg, ShifterOperand(32));
2531        __ it(PL);
2532        __ Lsl(o_h, low, temp, false, PL);
2533        // Shift the low part
2534        __ Lsl(o_l, low, second_reg);
2535      } else if (op->IsShr()) {
2536        // Shift the low part
2537        __ and_(second_reg, second_reg, ShifterOperand(63));
2538        __ Lsr(o_l, low, second_reg);
2539        // Shift the high part and `or` what underflew on the low part
2540        __ rsb(temp, second_reg, ShifterOperand(32));
2541        __ Lsl(temp, high, temp);
2542        __ orr(o_l, o_l, ShifterOperand(temp));
2543        // If the shift is > 32 bits, override the low part
2544        __ subs(temp, second_reg, ShifterOperand(32));
2545        __ it(PL);
2546        __ Asr(o_l, high, temp, false, PL);
2547        // Shift the high part
2548        __ Asr(o_h, high, second_reg);
2549      } else {
2550        // same as Shr except we use `Lsr`s and not `Asr`s
2551        __ and_(second_reg, second_reg, ShifterOperand(63));
2552        __ Lsr(o_l, low, second_reg);
2553        __ rsb(temp, second_reg, ShifterOperand(32));
2554        __ Lsl(temp, high, temp);
2555        __ orr(o_l, o_l, ShifterOperand(temp));
2556        __ subs(temp, second_reg, ShifterOperand(32));
2557        __ it(PL);
2558        __ Lsr(o_l, high, temp, false, PL);
2559        __ Lsr(o_h, high, second_reg);
2560      }
2561      break;
2562    }
2563    default:
2564      LOG(FATAL) << "Unexpected operation type " << type;
2565  }
2566}
2567
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // Shl/Shr/UShr share the same register constraints.
  HandleShift(shl);
}
2571
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation is shared between all shift kinds.
  HandleShift(shl);
}
2575
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // Shl/Shr/UShr share the same register constraints.
  HandleShift(shr);
}
2579
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation is shared between all shift kinds.
  HandleShift(shr);
}
2583
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // Shl/Shr/UShr share the same register constraints.
  HandleShift(ushr);
}
2587
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation is shared between all shift kinds.
  HandleShift(ushr);
}
2591
2592void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2593  LocationSummary* locations =
2594      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2595  InvokeRuntimeCallingConvention calling_convention;
2596  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2597  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2598  locations->SetOut(Location::RegisterLocation(R0));
2599}
2600
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  // Set up the entrypoint arguments: current method in the second argument
  // register, type index in the first; the result comes back in R0 (see the
  // locations builder).
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2610
2611void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2612  LocationSummary* locations =
2613      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2614  InvokeRuntimeCallingConvention calling_convention;
2615  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2616  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2617  locations->SetOut(Location::RegisterLocation(R0));
2618  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2619}
2620
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  // Set up the entrypoint arguments: current method in the third argument
  // register, type index in the first; the length was fixed in the second by
  // the locations builder, and the result comes back in R0.
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2630
2631void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2632  LocationSummary* locations =
2633      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2634  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2635  if (location.IsStackSlot()) {
2636    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2637  } else if (location.IsDoubleStackSlot()) {
2638    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2639  }
2640  locations->SetOut(location);
2641}
2642
// HParameterValue emits no code: the locations builder already mapped it to
// the register or caller-frame slot assigned by the calling convention.
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2647
2648void LocationsBuilderARM::VisitNot(HNot* not_) {
2649  LocationSummary* locations =
2650      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2651  locations->SetInAt(0, Location::RequiresRegister());
2652  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2653}
2654
2655void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2656  LocationSummary* locations = not_->GetLocations();
2657  Location out = locations->Out();
2658  Location in = locations->InAt(0);
2659  switch (not_->GetResultType()) {
2660    case Primitive::kPrimInt:
2661      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2662      break;
2663
2664    case Primitive::kPrimLong:
2665      __ mvn(out.AsRegisterPairLow<Register>(),
2666             ShifterOperand(in.AsRegisterPairLow<Register>()));
2667      __ mvn(out.AsRegisterPairHigh<Register>(),
2668             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2669      break;
2670
2671    default:
2672      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2673  }
2674}
2675
2676void LocationsBuilderARM::VisitBooleanNot(HBooleanNot* bool_not) {
2677  LocationSummary* locations =
2678      new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
2679  locations->SetInAt(0, Location::RequiresRegister());
2680  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2681}
2682
2683void InstructionCodeGeneratorARM::VisitBooleanNot(HBooleanNot* bool_not) {
2684  LocationSummary* locations = bool_not->GetLocations();
2685  Location out = locations->Out();
2686  Location in = locations->InAt(0);
2687  __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
2688}
2689
2690void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2691  LocationSummary* locations =
2692      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2693  switch (compare->InputAt(0)->GetType()) {
2694    case Primitive::kPrimLong: {
2695      locations->SetInAt(0, Location::RequiresRegister());
2696      locations->SetInAt(1, Location::RequiresRegister());
2697      // Output overlaps because it is written before doing the low comparison.
2698      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2699      break;
2700    }
2701    case Primitive::kPrimFloat:
2702    case Primitive::kPrimDouble: {
2703      locations->SetInAt(0, Location::RequiresFpuRegister());
2704      locations->SetInAt(1, Location::RequiresFpuRegister());
2705      locations->SetOut(Location::RequiresRegister());
2706      break;
2707    }
2708    default:
2709      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2710  }
2711}
2712
// Materializes the three-way comparison result (-1, 0, 1) of `compare`.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare high words first; only if they are equal does the unsigned
      // comparison of the low words decide the result.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // An unordered (NaN) comparison takes the HCompare's bias branch.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Flags here come from the last cmp/vmstat emitted above.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2761
2762void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2763  LocationSummary* locations =
2764      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2765  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2766    locations->SetInAt(i, Location::Any());
2767  }
2768  locations->SetOut(Location::Any());
2769}
2770
// Phis never reach the code generator; this visitor must not be called.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2775
2776void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2777  // TODO (ported from quick): revisit Arm barrier kinds
2778  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2779  switch (kind) {
2780    case MemBarrierKind::kAnyStore:
2781    case MemBarrierKind::kLoadAny:
2782    case MemBarrierKind::kAnyAny: {
2783      flavour = DmbOptions::ISH;
2784      break;
2785    }
2786    case MemBarrierKind::kStoreStore: {
2787      flavour = DmbOptions::ISHST;
2788      break;
2789    }
2790    default:
2791      LOG(FATAL) << "Unexpected memory barrier " << kind;
2792  }
2793  __ dmb(flavour);
2794}
2795
// Emits an atomic 64-bit load (ldrexd) of [addr + offset] into out_lo/out_hi.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    // out_lo is free to serve as a scratch register until the load below
    // overwrites it.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2807
// Emits an atomic 64-bit store of value_lo/value_hi to [addr + offset] via an
// ldrexd/strexd retry loop. temp1 and temp2 are clobbered.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    // temp1 is free to serve as a scratch register for the address add.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 into temp1 on success; retry until the exclusive store
  // succeeds.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2830
// Builds the register constraints for an instance or static field store.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  // Volatile wide stores without atomic ldrd/strd go through an
  // ldrexd/strexd loop, which needs extra registers (GenerateWideAtomicStore).
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2867
// Emits the store for an instance or static field set, including volatile
// barriers, atomic wide stores and the GC write barrier when needed.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  // Volatile stores are preceded by an any-store barrier.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Without atomic strd, a volatile wide store needs the exclusive-store
      // loop to stay single-copy atomic.
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Copy the double into core registers first, then store atomically.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  // Storing a reference marks the card of the holding object for the GC.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  // Volatile stores are followed by an any-any barrier.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2965
// Builds the register constraints for an instance or static field load.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // Volatile double loads without atomic ldrd need an ldrexd into a pair of
  // core temps before moving into the FPU register (see HandleFieldGet).
  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2989
// Emits the load for an instance or static field get, including atomic wide
// loads for volatile fields and the trailing load-any barrier.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Without atomic ldrd, use ldrexd for a single-copy atomic 64-bit load.
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load the two halves atomically into core temps, then assemble the
        // double with vmov.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  // Volatile loads are followed by a load-any barrier.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
3074
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field sets share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3078
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field sets share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3082
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field gets share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3086
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field gets share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3090
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field gets share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3094
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field gets share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3098
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field sets share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3102
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field sets share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3106
3107void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
3108  LocationSummary* locations =
3109      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3110  locations->SetInAt(0, Location::RequiresRegister());
3111  if (instruction->HasUses()) {
3112    locations->SetOut(Location::SameAsFirstInput());
3113  }
3114}
3115
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a following memory access can take over the check (it will fault on a
  // null object by itself), emit nothing here.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Probe the object: load [obj, #0] into IP. On a null object this load
  // faults, and the PC recorded below lets the runtime map the fault back to
  // this instruction's dex PC.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3125
3126void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
3127  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
3128  codegen_->AddSlowPath(slow_path);
3129
3130  LocationSummary* locations = instruction->GetLocations();
3131  Location obj = locations->InAt(0);
3132
3133  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
3134  __ b(slow_path->GetEntryLabel(), EQ);
3135}
3136
3137void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
3138  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
3139    GenerateImplicitNullCheck(instruction);
3140  } else {
3141    GenerateExplicitNullCheck(instruction);
3142  }
3143}
3144
3145void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
3146  LocationSummary* locations =
3147      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3148  locations->SetInAt(0, Location::RequiresRegister());
3149  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3150  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3151}
3152
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  // Loads a single array element. For each component type the element address
  // is either obj + data_offset + (constant_index << scale), folded into the
  // load's immediate offset, or computed into IP when the index is in a
  // register.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // Scale the register index by the element size (LSL #1).
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Object references are loaded as plain 32-bit words (compressed
      // 32-bit heap references).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // Register pairs use consecutive registers (see ExpectedPairLayout),
        // so naming the low register is enough for the pair load.
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The element load above may double as the implicit null check for `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3279
3280void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3281  Primitive::Type value_type = instruction->GetComponentType();
3282
3283  bool needs_write_barrier =
3284      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3285  bool needs_runtime_call = instruction->NeedsTypeCheck();
3286
3287  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3288      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3289  if (needs_runtime_call) {
3290    InvokeRuntimeCallingConvention calling_convention;
3291    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3292    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3293    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3294  } else {
3295    locations->SetInAt(0, Location::RequiresRegister());
3296    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3297    locations->SetInAt(2, Location::RequiresRegister());
3298
3299    if (needs_write_barrier) {
3300      // Temporary registers for the write barrier.
3301      locations->AddTemp(Location::RequiresRegister());
3302      locations->AddTemp(Location::RequiresRegister());
3303    }
3304  }
3305}
3306
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  // Stores one array element, mirroring VisitArrayGet's addressing scheme:
  // constant indices fold into the store's immediate offset, register indices
  // are scaled and added into IP. Reference stores may instead go through the
  // pAputObject runtime entry point when a type check is needed.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the null check here (not in the common tail below) so it
        // precedes the write barrier.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Type-checked reference store: delegate to the runtime, which also
        // performs null/bounds checks and the write barrier.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc(),
                                nullptr);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // Pairs use consecutive registers; naming the low one suffices.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3431
3432void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3433  LocationSummary* locations =
3434      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3435  locations->SetInAt(0, Location::RequiresRegister());
3436  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3437}
3438
3439void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3440  LocationSummary* locations = instruction->GetLocations();
3441  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3442  Register obj = locations->InAt(0).AsRegister<Register>();
3443  Register out = locations->Out().AsRegister<Register>();
3444  __ LoadFromOffset(kLoadWord, out, obj, offset);
3445  codegen_->MaybeRecordImplicitNullCheck(instruction);
3446}
3447
3448void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3449  LocationSummary* locations =
3450      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3451  locations->SetInAt(0, Location::RequiresRegister());
3452  locations->SetInAt(1, Location::RequiresRegister());
3453  if (instruction->HasUses()) {
3454    locations->SetOut(Location::SameAsFirstInput());
3455  }
3456}
3457
3458void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3459  LocationSummary* locations = instruction->GetLocations();
3460  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3461      instruction, locations->InAt(0), locations->InAt(1));
3462  codegen_->AddSlowPath(slow_path);
3463
3464  Register index = locations->InAt(0).AsRegister<Register>();
3465  Register length = locations->InAt(1).AsRegister<Register>();
3466
3467  __ cmp(index, ShifterOperand(length));
3468  __ b(slow_path->GetEntryLabel(), CS);
3469}
3470
// Write barrier: after storing `value` into `object`, mark the card table
// entry covering `object` as dirty. Null stores need no barrier.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card = the thread-local card table base.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp = object address >> kCardShift, i.e. the card index.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // strb stores only the low byte of `card` at card[temp]. NOTE(review): this
  // relies on the card table base's low byte encoding the dirty-card value,
  // avoiding a separate immediate load — confirm against the card table code.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3479
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no LocationSummary of their own.
  temp->SetLocations(nullptr);
}
3483
3484void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3485  // Nothing to do, this is driven by the code generator.
3486  UNUSED(temp);
3487}
3488
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  // Parallel moves are introduced after location building (by the register
  // allocator), so this visitor must never be reached.
  LOG(FATAL) << "Unreachable";
}
3493
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel move resolver, which sequences the moves (see
  // EmitMove/EmitSwap below).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3497
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No inputs or outputs; the check only needs a slow-path call environment.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3501
3502void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3503  HBasicBlock* block = instruction->GetBlock();
3504  if (block->GetLoopInformation() != nullptr) {
3505    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3506    // The back edge will generate the suspend check.
3507    return;
3508  }
3509  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3510    // The goto will generate the suspend check.
3511    return;
3512  }
3513  GenerateSuspendCheck(instruction, nullptr);
3514}
3515
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Test the thread's flags half-word: non-zero means some request (e.g.
  // suspension) is pending and the slow path must run.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Stand-alone check: take the slow path when flags are set, then resume
    // right after it.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: branch straight to the successor when no flags are
    // set; otherwise fall into the slow path, which jumps to the successor.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3534
3535ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3536  return codegen_->GetAssembler();
3537}
3538
// Emits one move of a parallel move, dispatching on the (source, destination)
// location kinds. IP and DTMP serve as core/FP scratch for memory-to-memory
// transfers; constants are materialized inline.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register -> register or stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // Stack slot -> core register, FP register, or another slot (via IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // Single-precision FP register -> FP register or stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot -> slot (via DTMP), core pair, or FP pair.
    if (destination.IsDoubleStackSlot()) {
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair -> pair or 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // Double-precision FP pair -> FP pair or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant -> register/pair or stack slot(s), materialized inline.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the raw bit pattern in two word stores.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3653
// Swaps a core register with a stack slot, using IP as the temporary.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3659
// Swaps two stack slots. IP is one temporary; the second core register comes
// from ScratchRegisterScope, which may spill it to the stack — in that case
// both slot offsets shift by one word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3670
// Emits an exchange between the two locations of move `index`, used to break
// cycles in a parallel move. IP (core) and DTMP (FP) are the scratch values;
// neither may itself be a swapped operand.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Three-mov swap through IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Single-precision swap through IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core pair <-> core pair: park one pair in DTMP during the swap.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> 64-bit stack slot.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    // Pairs are consecutive registers, so low_reg + 1 is the high half.
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // Double-precision swap through DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // FP pair <-> 64-bit stack slot.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // Single FP register <-> stack slot.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot <-> 64-bit slot: swap word by word.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3743
// Frees a core register for scratch use by saving it on the stack.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3747
// Undoes SpillScratch by reloading the register from the stack.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3751
3752void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3753  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3754      ? LocationSummary::kCallOnSlowPath
3755      : LocationSummary::kNoCall;
3756  LocationSummary* locations =
3757      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3758  locations->SetOut(Location::RequiresRegister());
3759}
3760
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // Fast path: the class is the current method's declaring class, read
    // straight off the ArtMethod.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // Look the class up in the current method's dex cache of resolved types.
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // A null cache entry means the class is unresolved: resolve (and possibly
    // initialize) it on the slow path.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3787
3788void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3789  LocationSummary* locations =
3790      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3791  locations->SetInAt(0, Location::RequiresRegister());
3792  if (check->HasUses()) {
3793    locations->SetOut(Location::SameAsFirstInput());
3794  }
3795}
3796
3797void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3798  // We assume the class is not null.
3799  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3800      check->GetLoadClass(), check, check->GetDexPc(), true);
3801  codegen_->AddSlowPath(slow_path);
3802  GenerateClassInitializationCheck(slow_path,
3803                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3804}
3805
// Emits an initialization check on `class_reg`, branching to `slow_path` if
// the class status is below kStatusInitialized. Binds the slow path's exit
// label at the end, so callers must not bind it themselves.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3816
3817void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3818  LocationSummary* locations =
3819      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3820  locations->SetOut(Location::RequiresRegister());
3821}
3822
// Loads a String from the declaring class' dex cache of strings, going to
// the slow path if the cache entry is still null (unresolved).
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current_method->declaring_class_->dex_cache_strings_[string_index]
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // A null entry means the string is not resolved yet: take the slow path.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3836
3837void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3838  LocationSummary* locations =
3839      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3840  locations->SetOut(Location::RequiresRegister());
3841}
3842
// Loads the pending exception from the current thread (TR) and clears the
// thread-local exception slot.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // Store null back so the exception is no longer considered pending.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3850
3851void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3852  LocationSummary* locations =
3853      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3854  InvokeRuntimeCallingConvention calling_convention;
3855  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3856}
3857
// Throwing is delegated entirely to the runtime's DeliverException entry point.
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
}
3862
3863void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3864  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3865      ? LocationSummary::kNoCall
3866      : LocationSummary::kCallOnSlowPath;
3867  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3868  locations->SetInAt(0, Location::RequiresRegister());
3869  locations->SetInAt(1, Location::RequiresRegister());
3870  // The out register is used as a temporary, so it overlaps with the inputs.
3871  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3872}
3873
// Materializes the boolean result of `obj instanceof cls` in the output
// register: 1 on a match, 0 otherwise (including when `obj` is null).
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    // NOTE(review): the slow path is expected to leave the result in `out`
    // before jumping back here — its code is not visible in this file chunk.
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3912
3913void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3914  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3915      instruction, LocationSummary::kCallOnSlowPath);
3916  locations->SetInAt(0, Location::RequiresRegister());
3917  locations->SetInAt(1, Location::RequiresRegister());
3918  locations->AddTemp(Location::RequiresRegister());
3919}
3920
// Generates a checkcast: a null object passes; otherwise the object's class
// is compared against `cls`, and any mismatch goes to the type-check slow path.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null reference always passes the cast: skip straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  // On mismatch, the slow path performs the full check (TypeCheckSlowPathARM).
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3941
3942void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3943  LocationSummary* locations =
3944      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3945  InvokeRuntimeCallingConvention calling_convention;
3946  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3947}
3948
3949void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3950  codegen_->InvokeRuntime(instruction->IsEnter()
3951        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3952      instruction,
3953      instruction->GetDexPc(),
3954      nullptr);
3955}
3956
// And/Or/Xor share identical register constraints; delegate to the common handler.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3960
3961void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3962  LocationSummary* locations =
3963      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3964  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3965         || instruction->GetResultType() == Primitive::kPrimLong);
3966  locations->SetInAt(0, Location::RequiresRegister());
3967  locations->SetInAt(1, Location::RequiresRegister());
3968  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3969}
3970
// Code generation for And/Or/Xor is shared; delegate to the common handler.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3982
3983void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3984  LocationSummary* locations = instruction->GetLocations();
3985
3986  if (instruction->GetResultType() == Primitive::kPrimInt) {
3987    Register first = locations->InAt(0).AsRegister<Register>();
3988    Register second = locations->InAt(1).AsRegister<Register>();
3989    Register out = locations->Out().AsRegister<Register>();
3990    if (instruction->IsAnd()) {
3991      __ and_(out, first, ShifterOperand(second));
3992    } else if (instruction->IsOr()) {
3993      __ orr(out, first, ShifterOperand(second));
3994    } else {
3995      DCHECK(instruction->IsXor());
3996      __ eor(out, first, ShifterOperand(second));
3997    }
3998  } else {
3999    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
4000    Location first = locations->InAt(0);
4001    Location second = locations->InAt(1);
4002    Location out = locations->Out();
4003    if (instruction->IsAnd()) {
4004      __ and_(out.AsRegisterPairLow<Register>(),
4005              first.AsRegisterPairLow<Register>(),
4006              ShifterOperand(second.AsRegisterPairLow<Register>()));
4007      __ and_(out.AsRegisterPairHigh<Register>(),
4008              first.AsRegisterPairHigh<Register>(),
4009              ShifterOperand(second.AsRegisterPairHigh<Register>()));
4010    } else if (instruction->IsOr()) {
4011      __ orr(out.AsRegisterPairLow<Register>(),
4012             first.AsRegisterPairLow<Register>(),
4013             ShifterOperand(second.AsRegisterPairLow<Register>()));
4014      __ orr(out.AsRegisterPairHigh<Register>(),
4015             first.AsRegisterPairHigh<Register>(),
4016             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4017    } else {
4018      DCHECK(instruction->IsXor());
4019      __ eor(out.AsRegisterPairLow<Register>(),
4020             first.AsRegisterPairLow<Register>(),
4021             ShifterOperand(second.AsRegisterPairLow<Register>()));
4022      __ eor(out.AsRegisterPairHigh<Register>(),
4023             first.AsRegisterPairHigh<Register>(),
4024             ShifterOperand(second.AsRegisterPairHigh<Register>()));
4025    }
4026  }
4027}
4028
// Emits the call sequence for a static or direct invoke. `temp` must be the
// ART method register and is clobbered by the lookup.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // Resolve the callee through the current method's dex cache, then make an
    // indirect call through its quick entry point.
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // A recursive call can branch directly to this method's own frame entry.
    __ bl(GetFrameEntryLabel());
  }

  DCHECK(!IsLeafMethod());
}
4060
void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor at code generation time is a compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
4066
void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor at code generation time is a compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
4072
4073}  // namespace arm
4074}  // namespace art
4075