code_generator_arm.cc revision 71fb52fee246b7d511f520febbd73dc7a9bbca79
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
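// VFP aliasing note: each of D0-D15 overlaps an even/odd S register pair
// (e.g. S0/S1 -> D0, S14/S15 -> D7), which is what this helper relies on.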
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static constexpr bool kExplicitStackOverflowCheck = false;
41
42static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
43static constexpr int kCurrentMethodStackOffset = 0;
44
45static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
46static constexpr size_t kRuntimeParameterCoreRegistersLength =
47    arraysize(kRuntimeParameterCoreRegisters);
48static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
49static constexpr size_t kRuntimeParameterFpuRegistersLength =
50    arraysize(kRuntimeParameterFpuRegisters);
51
52static constexpr DRegister DTMP = D7;
53static constexpr SRegister STMP = S14;
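// Note: DTMP (D7) aliases the STMP/S15 pair (FromLowSToD(STMP) == D7); both S
// registers are blocked in SetupBlockedRegisters() below so the scratch D register
// is always available.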
54
55class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
56 public:
57  InvokeRuntimeCallingConvention()
58      : CallingConvention(kRuntimeParameterCoreRegisters,
59                          kRuntimeParameterCoreRegistersLength,
60                          kRuntimeParameterFpuRegisters,
61                          kRuntimeParameterFpuRegistersLength) {}
62
63 private:
64  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
65};
66
67#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
68#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
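// For example, QUICK_ENTRY_POINT(pThrowNullPointer) is the byte offset of that
// entry point within the current Thread; InvokeRuntime() below loads it relative
// to TR and branches to it.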
69
70class SlowPathCodeARM : public SlowPathCode {
71 public:
72  SlowPathCodeARM() : entry_label_(), exit_label_() {}
73
74  Label* GetEntryLabel() { return &entry_label_; }
75  Label* GetExitLabel() { return &exit_label_; }
76
77 private:
78  Label entry_label_;
79  Label exit_label_;
80
81  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
82};
83
84class NullCheckSlowPathARM : public SlowPathCodeARM {
85 public:
86  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
87
88  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
89    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
90    __ Bind(GetEntryLabel());
91    arm_codegen->InvokeRuntime(
92        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
93  }
94
95 private:
96  HNullCheck* const instruction_;
97  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
98};
99
100class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
101 public:
102  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
103
104  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
105    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
106    __ Bind(GetEntryLabel());
107    arm_codegen->InvokeRuntime(
108        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
109  }
110
111 private:
112  HDivZeroCheck* const instruction_;
113  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
114};
115
116class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
117 public:
118  StackOverflowCheckSlowPathARM() {}
119
120  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
121    __ Bind(GetEntryLabel());
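    // Load the entry point directly into PC: this tail-calls the runtime's
    // stack-overflow throw, which never returns, so no blx/return sequence is needed.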
122    __ LoadFromOffset(kLoadWord, PC, TR,
123        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
124  }
125
126 private:
127  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
128};
129
130class SuspendCheckSlowPathARM : public SlowPathCodeARM {
131 public:
132  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
133      : instruction_(instruction), successor_(successor) {}
134
135  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
136    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
137    __ Bind(GetEntryLabel());
138    codegen->SaveLiveRegisters(instruction_->GetLocations());
139    arm_codegen->InvokeRuntime(
140        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
141    codegen->RestoreLiveRegisters(instruction_->GetLocations());
142    if (successor_ == nullptr) {
143      __ b(GetReturnLabel());
144    } else {
145      __ b(arm_codegen->GetLabelOf(successor_));
146    }
147  }
148
149  Label* GetReturnLabel() {
150    DCHECK(successor_ == nullptr);
151    return &return_label_;
152  }
153
154 private:
155  HSuspendCheck* const instruction_;
156  // If not null, the block to branch to after the suspend check.
157  HBasicBlock* const successor_;
158
159  // If `successor_` is null, the label to branch to after the suspend check.
160  Label return_label_;
161
162  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
163};
164
165class BoundsCheckSlowPathARM : public SlowPathCodeARM {
166 public:
167  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
168                         Location index_location,
169                         Location length_location)
170      : instruction_(instruction),
171        index_location_(index_location),
172        length_location_(length_location) {}
173
174  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
175    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
176    __ Bind(GetEntryLabel());
177    // We're moving two locations to locations that could overlap, so we need a parallel
178    // move resolver.
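    // (For instance, the index could already be sitting in the register the length
    // has to move into; the resolver orders the moves, via a temp if necessary,
    // so that neither value gets clobbered.)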
179    InvokeRuntimeCallingConvention calling_convention;
180    codegen->EmitParallelMoves(
181        index_location_,
182        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
183        length_location_,
184        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
185    arm_codegen->InvokeRuntime(
186        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
187  }
188
189 private:
190  HBoundsCheck* const instruction_;
191  const Location index_location_;
192  const Location length_location_;
193
194  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
195};
196
197class LoadClassSlowPathARM : public SlowPathCodeARM {
198 public:
199  LoadClassSlowPathARM(HLoadClass* cls,
200                       HInstruction* at,
201                       uint32_t dex_pc,
202                       bool do_clinit)
203      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
204    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
205  }
206
207  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
208    LocationSummary* locations = at_->GetLocations();
209
210    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
211    __ Bind(GetEntryLabel());
212    codegen->SaveLiveRegisters(locations);
213
214    InvokeRuntimeCallingConvention calling_convention;
215    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
216    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
217    int32_t entry_point_offset = do_clinit_
218        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
219        : QUICK_ENTRY_POINT(pInitializeType);
220    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
221
222    // Move the class to the desired location.
223    Location out = locations->Out();
224    if (out.IsValid()) {
225      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
226      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
227    }
228    codegen->RestoreLiveRegisters(locations);
229    __ b(GetExitLabel());
230  }
231
232 private:
233  // The class this slow path will load.
234  HLoadClass* const cls_;
235
236  // The instruction where this slow path is happening.
237  // (Might be the load class or an initialization check).
238  HInstruction* const at_;
239
240  // The dex PC of `at_`.
241  const uint32_t dex_pc_;
242
243  // Whether to initialize the class.
244  const bool do_clinit_;
245
246  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
247};
248
249class LoadStringSlowPathARM : public SlowPathCodeARM {
250 public:
251  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
252
253  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
254    LocationSummary* locations = instruction_->GetLocations();
255    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
256
257    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
258    __ Bind(GetEntryLabel());
259    codegen->SaveLiveRegisters(locations);
260
261    InvokeRuntimeCallingConvention calling_convention;
262    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
263    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
264    arm_codegen->InvokeRuntime(
265        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
266    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
267
268    codegen->RestoreLiveRegisters(locations);
269    __ b(GetExitLabel());
270  }
271
272 private:
273  HLoadString* const instruction_;
274
275  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
276};
277
278class TypeCheckSlowPathARM : public SlowPathCodeARM {
279 public:
280  TypeCheckSlowPathARM(HInstruction* instruction,
281                       Location class_to_check,
282                       Location object_class,
283                       uint32_t dex_pc)
284      : instruction_(instruction),
285        class_to_check_(class_to_check),
286        object_class_(object_class),
287        dex_pc_(dex_pc) {}
288
289  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
290    LocationSummary* locations = instruction_->GetLocations();
291    DCHECK(instruction_->IsCheckCast()
292           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
293
294    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
295    __ Bind(GetEntryLabel());
296    codegen->SaveLiveRegisters(locations);
297
298    // We're moving two locations to locations that could overlap, so we need a parallel
299    // move resolver.
300    InvokeRuntimeCallingConvention calling_convention;
301    codegen->EmitParallelMoves(
302        class_to_check_,
303        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
304        object_class_,
305        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
306
307    if (instruction_->IsInstanceOf()) {
308      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
309      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
310    } else {
311      DCHECK(instruction_->IsCheckCast());
312      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
313    }
314
315    codegen->RestoreLiveRegisters(locations);
316    __ b(GetExitLabel());
317  }
318
319 private:
320  HInstruction* const instruction_;
321  const Location class_to_check_;
322  const Location object_class_;
323  uint32_t dex_pc_;
324
325  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
326};
327
330#undef __
331#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
332
333inline Condition ARMCondition(IfCondition cond) {
334  switch (cond) {
335    case kCondEQ: return EQ;
336    case kCondNE: return NE;
337    case kCondLT: return LT;
338    case kCondLE: return LE;
339    case kCondGT: return GT;
340    case kCondGE: return GE;
341    default:
342      LOG(FATAL) << "Unknown if condition";
343  }
344  return EQ;        // Unreachable.
345}
346
347inline Condition ARMOppositeCondition(IfCondition cond) {
348  switch (cond) {
349    case kCondEQ: return NE;
350    case kCondNE: return EQ;
351    case kCondLT: return GE;
352    case kCondLE: return GT;
353    case kCondGT: return LE;
354    case kCondGE: return LT;
355    default:
356      LOG(FATAL) << "Unknown if condition";
357  }
358  return EQ;        // Unreachable.
359}
360
361void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
362  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
363}
364
365void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
366  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
367}
368
369size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
370  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
371  return kArmWordSize;
372}
373
374size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
375  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
376  return kArmWordSize;
377}
378
379size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
380  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
381  return kArmWordSize;
382}
383
384size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
385  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
386  return kArmWordSize;
387}
388
389CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
390                                   const ArmInstructionSetFeatures* isa_features)
391    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
392      block_labels_(graph->GetArena(), 0),
393      location_builder_(graph, this),
394      instruction_visitor_(graph, this),
395      move_resolver_(graph->GetArena(), this),
396      assembler_(true),
397      isa_features_(isa_features) {}
398
399size_t CodeGeneratorARM::FrameEntrySpillSize() const {
400  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
401}
402
403Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
404  switch (type) {
405    case Primitive::kPrimLong: {
406      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
407      ArmManagedRegister pair =
408          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
409      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
410      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
411
412      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
413      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
414      UpdateBlockedPairRegisters();
415      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
416    }
417
418    case Primitive::kPrimByte:
419    case Primitive::kPrimBoolean:
420    case Primitive::kPrimChar:
421    case Primitive::kPrimShort:
422    case Primitive::kPrimInt:
423    case Primitive::kPrimNot: {
424      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
425      // Block all register pairs that contain `reg`.
426      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
427        ArmManagedRegister current =
428            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
429        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
430          blocked_register_pairs_[i] = true;
431        }
432      }
433      return Location::RegisterLocation(reg);
434    }
435
436    case Primitive::kPrimFloat: {
437      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
438      return Location::FpuRegisterLocation(reg);
439    }
440
441    case Primitive::kPrimDouble: {
442      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
443      DCHECK_EQ(reg % 2, 0);
444      return Location::FpuRegisterPairLocation(reg, reg + 1);
445    }
446
447    case Primitive::kPrimVoid:
448      LOG(FATAL) << "Unreachable type " << type;
449  }
450
451  return Location();
452}
453
454void CodeGeneratorARM::SetupBlockedRegisters() const {
455  // Don't allocate the dalvik-style register pair used for argument passing.
456  blocked_register_pairs_[R1_R2] = true;
457
458  // Stack register, LR and PC are always reserved.
459  blocked_core_registers_[SP] = true;
460  blocked_core_registers_[LR] = true;
461  blocked_core_registers_[PC] = true;
462
463  // Reserve thread register.
464  blocked_core_registers_[TR] = true;
465
466  // Reserve temp register.
467  blocked_core_registers_[IP] = true;
468
469  // TODO: We currently don't use Quick's callee saved registers.
470  // We always save and restore R6 and R7 to make sure we can use three
471  // register pairs for long operations.
472  blocked_core_registers_[R4] = true;
473  blocked_core_registers_[R5] = true;
474  blocked_core_registers_[R8] = true;
475  blocked_core_registers_[R10] = true;
476  blocked_core_registers_[R11] = true;
477
478  // Don't allocate our temporary double register.
479  blocked_fpu_registers_[STMP] = true;
480  blocked_fpu_registers_[STMP + 1] = true;
481  DCHECK_EQ(FromLowSToD(STMP), DTMP);
482
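  // S16-S31 (D8-D15) are callee-saved under the AAPCS VFP convention; the frame
  // entry (GenerateFrameEntry) only saves LR/R6/R7, so these registers are simply
  // kept out of allocation.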
483  blocked_fpu_registers_[S16] = true;
484  blocked_fpu_registers_[S17] = true;
485  blocked_fpu_registers_[S18] = true;
486  blocked_fpu_registers_[S19] = true;
487  blocked_fpu_registers_[S20] = true;
488  blocked_fpu_registers_[S21] = true;
489  blocked_fpu_registers_[S22] = true;
490  blocked_fpu_registers_[S23] = true;
491  blocked_fpu_registers_[S24] = true;
492  blocked_fpu_registers_[S25] = true;
493  blocked_fpu_registers_[S26] = true;
494  blocked_fpu_registers_[S27] = true;
495  blocked_fpu_registers_[S28] = true;
496  blocked_fpu_registers_[S29] = true;
497  blocked_fpu_registers_[S30] = true;
498  blocked_fpu_registers_[S31] = true;
499
500  UpdateBlockedPairRegisters();
501}
502
503void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
504  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
505    ArmManagedRegister current =
506        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
507    if (blocked_core_registers_[current.AsRegisterPairLow()]
508        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
509      blocked_register_pairs_[i] = true;
510    }
511  }
512}
513
514InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
515      : HGraphVisitor(graph),
516        assembler_(codegen->GetAssembler()),
517        codegen_(codegen) {}
518
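// Rough sketch of the frame built below (addresses decrease downwards):
//   | caller's frame                  |
//   | LR, R7, R6 (callee saves)       |  <- LR at the highest address
//   | spill slots / outgoing args ... |
//   | current ArtMethod* (from R0)    |  <- SP (kCurrentMethodStackOffset == 0)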
519void CodeGeneratorARM::GenerateFrameEntry() {
520  bool skip_overflow_check =
521      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
522  if (!skip_overflow_check) {
523    if (kExplicitStackOverflowCheck) {
524      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
525      AddSlowPath(slow_path);
526
527      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
528      __ cmp(SP, ShifterOperand(IP));
529      __ b(slow_path->GetEntryLabel(), CC);
530    } else {
531      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
532      __ LoadFromOffset(kLoadWord, IP, IP, 0);
533      RecordPcInfo(nullptr, 0);
534    }
535  }
536
537  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
538  __ PushList(1 << LR | 1 << R6 | 1 << R7);
539
540  // The return PC has already been pushed on the stack.
541  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
542  __ StoreToOffset(kStoreWord, R0, SP, 0);
543}
544
545void CodeGeneratorARM::GenerateFrameExit() {
546  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
547  __ PopList(1 << PC | 1 << R6 | 1 << R7);
548}
549
550void CodeGeneratorARM::Bind(HBasicBlock* block) {
551  __ Bind(GetLabelOf(block));
552}
553
554Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
555  switch (load->GetType()) {
556    case Primitive::kPrimLong:
557    case Primitive::kPrimDouble:
558      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
559      break;
560
561    case Primitive::kPrimInt:
562    case Primitive::kPrimNot:
563    case Primitive::kPrimFloat:
564      return Location::StackSlot(GetStackSlot(load->GetLocal()));
565
566    case Primitive::kPrimBoolean:
567    case Primitive::kPrimByte:
568    case Primitive::kPrimChar:
569    case Primitive::kPrimShort:
570    case Primitive::kPrimVoid:
571      LOG(FATAL) << "Unexpected type " << load->GetType();
572  }
573
574  LOG(FATAL) << "Unreachable";
575  return Location();
576}
577
578Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
579  switch (type) {
580    case Primitive::kPrimBoolean:
581    case Primitive::kPrimByte:
582    case Primitive::kPrimChar:
583    case Primitive::kPrimShort:
584    case Primitive::kPrimInt:
585    case Primitive::kPrimNot: {
586      uint32_t index = gp_index_++;
587      uint32_t stack_index = stack_index_++;
588      if (index < calling_convention.GetNumberOfRegisters()) {
589        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
590      } else {
591        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
592      }
593    }
594
595    case Primitive::kPrimLong: {
596      uint32_t index = gp_index_;
597      uint32_t stack_index = stack_index_;
598      gp_index_ += 2;
599      stack_index_ += 2;
600      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
601        if (calling_convention.GetRegisterAt(index) == R1) {
602          // Skip R1, and use R2_R3 instead.
603          gp_index_++;
604          index++;
605        }
606      }
607      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
608        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
609                  calling_convention.GetRegisterAt(index + 1));
610        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
611                                              calling_convention.GetRegisterAt(index + 1));
612      } else {
613        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
614      }
615    }
616
617    case Primitive::kPrimFloat: {
618      uint32_t stack_index = stack_index_++;
619      if (float_index_ % 2 == 0) {
620        float_index_ = std::max(double_index_, float_index_);
621      }
622      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
623        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
624      } else {
625        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
626      }
627    }
628
629    case Primitive::kPrimDouble: {
630      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
631      uint32_t stack_index = stack_index_;
632      stack_index_ += 2;
633      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
634        uint32_t index = double_index_;
635        double_index_ += 2;
636        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) + 1,
637                  calling_convention.GetFpuRegisterAt(index + 1));
638        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) & 1, 0);
639        return Location::FpuRegisterPairLocation(
640          calling_convention.GetFpuRegisterAt(index),
641          calling_convention.GetFpuRegisterAt(index + 1));
642      } else {
643        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
644      }
645    }
646
647    case Primitive::kPrimVoid:
648      LOG(FATAL) << "Unexpected parameter type " << type;
649      break;
650  }
651  return Location();
652}
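// Example of the FP bookkeeping above, assuming the convention hands out its FPU
// registers in order (S0, S1, ...): for arguments (float, double, float) the first
// float takes S0, the double is aligned up to the S2/S3 pair, and the second float
// back-fills the skipped S1.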
653
654Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
655  switch (type) {
656    case Primitive::kPrimBoolean:
657    case Primitive::kPrimByte:
658    case Primitive::kPrimChar:
659    case Primitive::kPrimShort:
660    case Primitive::kPrimInt:
661    case Primitive::kPrimNot: {
662      return Location::RegisterLocation(R0);
663    }
664
665    case Primitive::kPrimFloat: {
666      return Location::FpuRegisterLocation(S0);
667    }
668
669    case Primitive::kPrimLong: {
670      return Location::RegisterPairLocation(R0, R1);
671    }
672
673    case Primitive::kPrimDouble: {
674      return Location::FpuRegisterPairLocation(S0, S1);
675    }
676
677    case Primitive::kPrimVoid:
678      return Location();
679  }
680  UNREACHABLE();
681  return Location();
682}
683
684void CodeGeneratorARM::Move32(Location destination, Location source) {
685  if (source.Equals(destination)) {
686    return;
687  }
688  if (destination.IsRegister()) {
689    if (source.IsRegister()) {
690      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
691    } else if (source.IsFpuRegister()) {
692      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
693    } else {
694      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
695    }
696  } else if (destination.IsFpuRegister()) {
697    if (source.IsRegister()) {
698      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
699    } else if (source.IsFpuRegister()) {
700      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
701    } else {
702      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
703    }
704  } else {
705    DCHECK(destination.IsStackSlot()) << destination;
706    if (source.IsRegister()) {
707      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
708    } else if (source.IsFpuRegister()) {
709      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
710    } else {
711      DCHECK(source.IsStackSlot()) << source;
712      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
713      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
714    }
715  }
716}
717
718void CodeGeneratorARM::Move64(Location destination, Location source) {
719  if (source.Equals(destination)) {
720    return;
721  }
722  if (destination.IsRegisterPair()) {
723    if (source.IsRegisterPair()) {
724      EmitParallelMoves(
725          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
726          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
727          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
728          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
729    } else if (source.IsFpuRegister()) {
730      UNIMPLEMENTED(FATAL);
731    } else {
732      // No conflict possible, so just do the moves.
733      DCHECK(source.IsDoubleStackSlot());
734      if (destination.AsRegisterPairLow<Register>() == R1) {
735        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
736        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
737        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
738      } else {
739        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
740                          SP, source.GetStackIndex());
741      }
742    }
743  } else if (destination.IsFpuRegisterPair()) {
744    if (source.IsDoubleStackSlot()) {
745      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
746                         SP,
747                         source.GetStackIndex());
748    } else {
749      UNIMPLEMENTED(FATAL);
750    }
751  } else {
752    DCHECK(destination.IsDoubleStackSlot());
753    if (source.IsRegisterPair()) {
754      // No conflict possible, so just do the moves.
755      if (source.AsRegisterPairLow<Register>() == R1) {
756        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
757        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
758        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
759      } else {
760        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
761                         SP, destination.GetStackIndex());
762      }
763    } else if (source.IsFpuRegisterPair()) {
764      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
765                        SP,
766                        destination.GetStackIndex());
767    } else {
768      DCHECK(source.IsDoubleStackSlot());
769      EmitParallelMoves(
770          Location::StackSlot(source.GetStackIndex()),
771          Location::StackSlot(destination.GetStackIndex()),
772          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
773          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
774    }
775  }
776}
777
778void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
779  LocationSummary* locations = instruction->GetLocations();
780  if (locations != nullptr && locations->Out().Equals(location)) {
781    return;
782  }
783
784  if (locations != nullptr && locations->Out().IsConstant()) {
785    HConstant* const_to_move = locations->Out().GetConstant();
786    if (const_to_move->IsIntConstant()) {
787      int32_t value = const_to_move->AsIntConstant()->GetValue();
788      if (location.IsRegister()) {
789        __ LoadImmediate(location.AsRegister<Register>(), value);
790      } else {
791        DCHECK(location.IsStackSlot());
792        __ LoadImmediate(IP, value);
793        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
794      }
795    } else {
796      DCHECK(const_to_move->IsLongConstant()) << const_to_move;
797      int64_t value = const_to_move->AsLongConstant()->GetValue();
798      if (location.IsRegisterPair()) {
799        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
800        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
801      } else {
802        DCHECK(location.IsDoubleStackSlot());
803        __ LoadImmediate(IP, Low32Bits(value));
804        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
805        __ LoadImmediate(IP, High32Bits(value));
806        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
807      }
808    }
809  } else if (instruction->IsLoadLocal()) {
810    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
811    switch (instruction->GetType()) {
812      case Primitive::kPrimBoolean:
813      case Primitive::kPrimByte:
814      case Primitive::kPrimChar:
815      case Primitive::kPrimShort:
816      case Primitive::kPrimInt:
817      case Primitive::kPrimNot:
818      case Primitive::kPrimFloat:
819        Move32(location, Location::StackSlot(stack_slot));
820        break;
821
822      case Primitive::kPrimLong:
823      case Primitive::kPrimDouble:
824        Move64(location, Location::DoubleStackSlot(stack_slot));
825        break;
826
827      default:
828        LOG(FATAL) << "Unexpected type " << instruction->GetType();
829    }
830  } else if (instruction->IsTemporary()) {
831    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
832    if (temp_location.IsStackSlot()) {
833      Move32(location, temp_location);
834    } else {
835      DCHECK(temp_location.IsDoubleStackSlot());
836      Move64(location, temp_location);
837    }
838  } else {
839    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
840    switch (instruction->GetType()) {
841      case Primitive::kPrimBoolean:
842      case Primitive::kPrimByte:
843      case Primitive::kPrimChar:
844      case Primitive::kPrimShort:
845      case Primitive::kPrimNot:
846      case Primitive::kPrimInt:
847      case Primitive::kPrimFloat:
848        Move32(location, locations->Out());
849        break;
850
851      case Primitive::kPrimLong:
852      case Primitive::kPrimDouble:
853        Move64(location, locations->Out());
854        break;
855
856      default:
857        LOG(FATAL) << "Unexpected type " << instruction->GetType();
858    }
859  }
860}
861
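// Generic runtime-call helper: every pXxx entry point is a word inside the current
// Thread (pointed to by TR), so a single load plus blx reaches it; RecordPcInfo
// then records the dex pc so the runtime has mapping information for this call site.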
862void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
863                                     HInstruction* instruction,
864                                     uint32_t dex_pc) {
865  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
866  __ blx(LR);
867  RecordPcInfo(instruction, dex_pc);
868  DCHECK(instruction->IsSuspendCheck()
869      || instruction->IsBoundsCheck()
870      || instruction->IsNullCheck()
871      || instruction->IsDivZeroCheck()
872      || instruction->GetLocations()->CanCall()
873      || !IsLeafMethod());
874}
875
876void LocationsBuilderARM::VisitGoto(HGoto* got) {
877  got->SetLocations(nullptr);
878}
879
880void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
881  HBasicBlock* successor = got->GetSuccessor();
882  DCHECK(!successor->IsExitBlock());
883
884  HBasicBlock* block = got->GetBlock();
885  HInstruction* previous = got->GetPrevious();
886
887  HLoopInformation* info = block->GetLoopInformation();
888  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
889    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
890    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
891    return;
892  }
893
894  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
895    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
896  }
897  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
898    __ b(codegen_->GetLabelOf(successor));
899  }
900}
901
902void LocationsBuilderARM::VisitExit(HExit* exit) {
903  exit->SetLocations(nullptr);
904}
905
906void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
907  UNUSED(exit);
908  if (kIsDebugBuild) {
909    __ Comment("Unreachable");
910    __ bkpt(0);
911  }
912}
913
914void LocationsBuilderARM::VisitIf(HIf* if_instr) {
915  LocationSummary* locations =
916      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
917  HInstruction* cond = if_instr->InputAt(0);
918  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
919    locations->SetInAt(0, Location::RequiresRegister());
920  }
921}
922
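// Three cases below: a constant condition is resolved statically; a materialized
// condition has already been computed into a register and is compared against 0;
// otherwise the compare and the branch are fused, using the HCondition's inputs
// directly.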
923void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
924  HInstruction* cond = if_instr->InputAt(0);
925  if (cond->IsIntConstant()) {
926    // Constant condition, statically compared against 1.
927    int32_t cond_value = cond->AsIntConstant()->GetValue();
928    if (cond_value == 1) {
929      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
930                                     if_instr->IfTrueSuccessor())) {
931        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
932      }
933      return;
934    } else {
935      DCHECK_EQ(cond_value, 0);
936    }
937  } else {
938    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
939      // Condition has been materialized, compare the output to 0
940      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
941      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
942             ShifterOperand(0));
943      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
944    } else {
945      // Condition has not been materialized, use its inputs as the
946      // comparison and its condition as the branch condition.
947      LocationSummary* locations = cond->GetLocations();
948      Register left = locations->InAt(0).AsRegister<Register>();
949      if (locations->InAt(1).IsRegister()) {
950        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
951      } else {
952        DCHECK(locations->InAt(1).IsConstant());
953        int32_t value =
954            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
955        ShifterOperand operand;
956        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
957          __ cmp(left, operand);
958        } else {
959          Register temp = IP;
960          __ LoadImmediate(temp, value);
961          __ cmp(left, ShifterOperand(temp));
962        }
963      }
964      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
965           ARMCondition(cond->AsCondition()->GetCondition()));
966    }
967  }
968  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
969                                 if_instr->IfFalseSuccessor())) {
970    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
971  }
972}
973
974
975void LocationsBuilderARM::VisitCondition(HCondition* comp) {
976  LocationSummary* locations =
977      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
978  locations->SetInAt(0, Location::RequiresRegister());
979  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
980  if (comp->NeedsMaterialization()) {
981    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
982  }
983}
984
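// When materialization is required, the sequence emitted below is roughly
// (Thumb-2, taking HEqual as an example):
//   cmp   r_left, <rhs>
//   ite   eq
//   moveq r_out, #1
//   movne r_out, #0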
985void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
986  if (!comp->NeedsMaterialization()) return;
987  LocationSummary* locations = comp->GetLocations();
988  Register left = locations->InAt(0).AsRegister<Register>();
989
990  if (locations->InAt(1).IsRegister()) {
991    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
992  } else {
993    DCHECK(locations->InAt(1).IsConstant());
994    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
995    ShifterOperand operand;
996    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
997      __ cmp(left, operand);
998    } else {
999      Register temp = IP;
1000      __ LoadImmediate(temp, value);
1001      __ cmp(left, ShifterOperand(temp));
1002    }
1003  }
1004  __ it(ARMCondition(comp->GetCondition()), kItElse);
1005  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
1006         ARMCondition(comp->GetCondition()));
1007  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
1008         ARMOppositeCondition(comp->GetCondition()));
1009}
1010
1011void LocationsBuilderARM::VisitEqual(HEqual* comp) {
1012  VisitCondition(comp);
1013}
1014
1015void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
1016  VisitCondition(comp);
1017}
1018
1019void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1020  VisitCondition(comp);
1021}
1022
1023void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1024  VisitCondition(comp);
1025}
1026
1027void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1028  VisitCondition(comp);
1029}
1030
1031void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1032  VisitCondition(comp);
1033}
1034
1035void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1036  VisitCondition(comp);
1037}
1038
1039void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1040  VisitCondition(comp);
1041}
1042
1043void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1044  VisitCondition(comp);
1045}
1046
1047void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1048  VisitCondition(comp);
1049}
1050
1051void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1052  VisitCondition(comp);
1053}
1054
1055void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1056  VisitCondition(comp);
1057}
1058
1059void LocationsBuilderARM::VisitLocal(HLocal* local) {
1060  local->SetLocations(nullptr);
1061}
1062
1063void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1064  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1065}
1066
1067void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1068  load->SetLocations(nullptr);
1069}
1070
1071void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1072  // Nothing to do, this is driven by the code generator.
1073  UNUSED(load);
1074}
1075
1076void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1077  LocationSummary* locations =
1078      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1079  switch (store->InputAt(1)->GetType()) {
1080    case Primitive::kPrimBoolean:
1081    case Primitive::kPrimByte:
1082    case Primitive::kPrimChar:
1083    case Primitive::kPrimShort:
1084    case Primitive::kPrimInt:
1085    case Primitive::kPrimNot:
1086    case Primitive::kPrimFloat:
1087      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1088      break;
1089
1090    case Primitive::kPrimLong:
1091    case Primitive::kPrimDouble:
1092      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1093      break;
1094
1095    default:
1096      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1097  }
1098}
1099
1100void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1101  UNUSED(store);
1102}
1103
1104void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1105  LocationSummary* locations =
1106      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1107  locations->SetOut(Location::ConstantLocation(constant));
1108}
1109
1110void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1111  // Will be generated at use site.
1112  UNUSED(constant);
1113}
1114
1115void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1116  LocationSummary* locations =
1117      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1118  locations->SetOut(Location::ConstantLocation(constant));
1119}
1120
1121void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1122  // Will be generated at use site.
1123  UNUSED(constant);
1124}
1125
1126void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1127  LocationSummary* locations =
1128      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1129  locations->SetOut(Location::ConstantLocation(constant));
1130}
1131
1132void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1133  // Will be generated at use site.
1134  UNUSED(constant);
1135}
1136
1137void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1138  LocationSummary* locations =
1139      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1140  locations->SetOut(Location::ConstantLocation(constant));
1141}
1142
1143void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1144  // Will be generated at use site.
1145  UNUSED(constant);
1146}
1147
1148void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1149  ret->SetLocations(nullptr);
1150}
1151
1152void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1153  UNUSED(ret);
1154  codegen_->GenerateFrameExit();
1155}
1156
1157void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1158  LocationSummary* locations =
1159      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1160  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1161}
1162
1163void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1164  UNUSED(ret);
1165  codegen_->GenerateFrameExit();
1166}
1167
1168void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1169  HandleInvoke(invoke);
1170}
1171
1172void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1173  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1174}
1175
1176void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1177  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1178
1179  // TODO: Implement all kinds of calls:
1180  // 1) boot -> boot
1181  // 2) app -> boot
1182  // 3) app -> app
1183  //
1184  // Currently we only implement the app -> app logic, which looks the callee up in the dex cache of resolved methods.
1185
1186  // temp = method;
1187  codegen_->LoadCurrentMethod(temp);
1188  // temp = temp->dex_cache_resolved_methods_;
1189  __ LoadFromOffset(
1190      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1191  // temp = temp[index_in_cache]
1192  __ LoadFromOffset(
1193      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
1194  // LR = temp[offset_of_quick_compiled_code]
1195  __ LoadFromOffset(kLoadWord, LR, temp,
1196                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1197                         kArmWordSize).Int32Value());
1198  // LR();
1199  __ blx(LR);
1200
1201  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1202  DCHECK(!codegen_->IsLeafMethod());
1203}
1204
1205void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1206  LocationSummary* locations =
1207      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1208  locations->AddTemp(Location::RegisterLocation(R0));
1209
1210  InvokeDexCallingConventionVisitor calling_convention_visitor;
1211  for (size_t i = 0; i < invoke->InputCount(); i++) {
1212    HInstruction* input = invoke->InputAt(i);
1213    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1214  }
1215
1216  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1217}
1218
1219void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1220  HandleInvoke(invoke);
1221}
1222
1223void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1224  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1225  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1226          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1227  LocationSummary* locations = invoke->GetLocations();
1228  Location receiver = locations->InAt(0);
1229  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1230  // temp = object->GetClass();
1231  if (receiver.IsStackSlot()) {
1232    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1233    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1234  } else {
1235    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1236  }
1237  // temp = temp->GetMethodAt(method_offset);
1238  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1239      kArmWordSize).Int32Value();
1240  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1241  // LR = temp->GetEntryPoint();
1242  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1243  // LR();
1244  __ blx(LR);
1245  DCHECK(!codegen_->IsLeafMethod());
1246  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1247}
1248
1249void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1250  HandleInvoke(invoke);
1251  // Add the hidden argument.
1252  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1253}
1254
1255void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1256  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1257  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1258  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1259          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1260  LocationSummary* locations = invoke->GetLocations();
1261  Location receiver = locations->InAt(0);
1262  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1263
1264  // Set the hidden argument.
1265  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
1266                   invoke->GetDexMethodIndex());
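  // The hidden argument in R12/IP is the interface method's dex method index; an
  // IMT conflict stub installed in the slot is expected to use it to find the
  // actual method to call.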
1267
1268  // temp = object->GetClass();
1269  if (receiver.IsStackSlot()) {
1270    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1271    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1272  } else {
1273    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1274  }
1275  // temp = temp->GetImtEntryAt(method_offset);
1276  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1277      kArmWordSize).Int32Value();
1278  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1279  // LR = temp->GetEntryPoint();
1280  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1281  // LR();
1282  __ blx(LR);
1283  DCHECK(!codegen_->IsLeafMethod());
1284  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1285}
1286
1287void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1288  LocationSummary* locations =
1289      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1290  switch (neg->GetResultType()) {
1291    case Primitive::kPrimInt:
1292    case Primitive::kPrimLong: {
1293      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1294      locations->SetInAt(0, Location::RequiresRegister());
1295      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1296      break;
1297    }
1298
1299    case Primitive::kPrimFloat:
1300    case Primitive::kPrimDouble:
1301      locations->SetInAt(0, Location::RequiresFpuRegister());
1302      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1303      break;
1304
1305    default:
1306      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1307  }
1308}
1309
1310void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1311  LocationSummary* locations = neg->GetLocations();
1312  Location out = locations->Out();
1313  Location in = locations->InAt(0);
1314  switch (neg->GetResultType()) {
1315    case Primitive::kPrimInt:
1316      DCHECK(in.IsRegister());
1317      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
1318      break;
1319
1320    case Primitive::kPrimLong:
1321      DCHECK(in.IsRegisterPair());
1322      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1323      __ rsbs(out.AsRegisterPairLow<Register>(),
1324              in.AsRegisterPairLow<Register>(),
1325              ShifterOperand(0));
1326      // We cannot emit an RSC (Reverse Subtract with Carry)
1327      // instruction here, as it does not exist in the Thumb-2
1328      // instruction set.  We use the following approach
1329      // using SBC and SUB instead.
1330      //
1331      // out.hi = -C
1332      __ sbc(out.AsRegisterPairHigh<Register>(),
1333             out.AsRegisterPairHigh<Register>(),
1334             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1335      // out.hi = out.hi - in.hi
1336      __ sub(out.AsRegisterPairHigh<Register>(),
1337             out.AsRegisterPairHigh<Register>(),
1338             ShifterOperand(in.AsRegisterPairHigh<Register>()));
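      // Worked example: negating the long 1 (lo = 1, hi = 0) gives out.lo = 0xFFFFFFFF
      // with a borrow (C == 0), so out.hi = -1 - 0 = 0xFFFFFFFF, i.e. the result is -1.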
1339      break;
1340
1341    case Primitive::kPrimFloat:
1342      DCHECK(in.IsFpuRegister());
1343      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
1344      break;
1345
1346    case Primitive::kPrimDouble:
1347      DCHECK(in.IsFpuRegisterPair());
1348      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1349               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1350      break;
1351
1352    default:
1353      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1354  }
1355}
1356
1357void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1358  Primitive::Type result_type = conversion->GetResultType();
1359  Primitive::Type input_type = conversion->GetInputType();
1360  DCHECK_NE(result_type, input_type);
1361
1362  // The float-to-long and double-to-long type conversions rely on a
1363  // call to the runtime.
1364  LocationSummary::CallKind call_kind =
1365      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1366       && result_type == Primitive::kPrimLong)
1367      ? LocationSummary::kCall
1368      : LocationSummary::kNoCall;
1369  LocationSummary* locations =
1370      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1371
1372  switch (result_type) {
1373    case Primitive::kPrimByte:
1374      switch (input_type) {
1375        case Primitive::kPrimShort:
1376        case Primitive::kPrimInt:
1377        case Primitive::kPrimChar:
1378          // Processing a Dex `int-to-byte' instruction.
1379          locations->SetInAt(0, Location::RequiresRegister());
1380          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1381          break;
1382
1383        default:
1384          LOG(FATAL) << "Unexpected type conversion from " << input_type
1385                     << " to " << result_type;
1386      }
1387      break;
1388
1389    case Primitive::kPrimShort:
1390      switch (input_type) {
1391        case Primitive::kPrimByte:
1392        case Primitive::kPrimInt:
1393        case Primitive::kPrimChar:
1394          // Processing a Dex `int-to-short' instruction.
1395          locations->SetInAt(0, Location::RequiresRegister());
1396          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1397          break;
1398
1399        default:
1400          LOG(FATAL) << "Unexpected type conversion from " << input_type
1401                     << " to " << result_type;
1402      }
1403      break;
1404
1405    case Primitive::kPrimInt:
1406      switch (input_type) {
1407        case Primitive::kPrimLong:
1408          // Processing a Dex `long-to-int' instruction.
1409          locations->SetInAt(0, Location::Any());
1410          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1411          break;
1412
1413        case Primitive::kPrimFloat:
1414          // Processing a Dex `float-to-int' instruction.
1415          locations->SetInAt(0, Location::RequiresFpuRegister());
1416          locations->SetOut(Location::RequiresRegister());
1417          locations->AddTemp(Location::RequiresFpuRegister());
1418          break;
1419
1420        case Primitive::kPrimDouble:
1421          // Processing a Dex `double-to-int' instruction.
1422          locations->SetInAt(0, Location::RequiresFpuRegister());
1423          locations->SetOut(Location::RequiresRegister());
1424          locations->AddTemp(Location::RequiresFpuRegister());
1425          break;
1426
1427        default:
1428          LOG(FATAL) << "Unexpected type conversion from " << input_type
1429                     << " to " << result_type;
1430      }
1431      break;
1432
1433    case Primitive::kPrimLong:
1434      switch (input_type) {
1435        case Primitive::kPrimByte:
1436        case Primitive::kPrimShort:
1437        case Primitive::kPrimInt:
1438        case Primitive::kPrimChar:
1439          // Processing a Dex `int-to-long' instruction.
1440          locations->SetInAt(0, Location::RequiresRegister());
1441          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1442          break;
1443
1444        case Primitive::kPrimFloat: {
1445          // Processing a Dex `float-to-long' instruction.
1446          InvokeRuntimeCallingConvention calling_convention;
1447          locations->SetInAt(0, Location::FpuRegisterLocation(
1448              calling_convention.GetFpuRegisterAt(0)));
1449          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1450          break;
1451        }
1452
1453        case Primitive::kPrimDouble: {
1454          // Processing a Dex `double-to-long' instruction.
1455          InvokeRuntimeCallingConvention calling_convention;
1456          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1457              calling_convention.GetFpuRegisterAt(0),
1458              calling_convention.GetFpuRegisterAt(1)));
1459          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1460          break;
1461        }
1462
1463        default:
1464          LOG(FATAL) << "Unexpected type conversion from " << input_type
1465                     << " to " << result_type;
1466      }
1467      break;
1468
1469    case Primitive::kPrimChar:
1470      switch (input_type) {
1471        case Primitive::kPrimByte:
1472        case Primitive::kPrimShort:
1473        case Primitive::kPrimInt:
1474          // Processing a Dex `int-to-char' instruction.
1475          locations->SetInAt(0, Location::RequiresRegister());
1476          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1477          break;
1478
1479        default:
1480          LOG(FATAL) << "Unexpected type conversion from " << input_type
1481                     << " to " << result_type;
1482      }
1483      break;
1484
1485    case Primitive::kPrimFloat:
1486      switch (input_type) {
1487        case Primitive::kPrimByte:
1488        case Primitive::kPrimShort:
1489        case Primitive::kPrimInt:
1490        case Primitive::kPrimChar:
1491          // Processing a Dex `int-to-float' instruction.
1492          locations->SetInAt(0, Location::RequiresRegister());
1493          locations->SetOut(Location::RequiresFpuRegister());
1494          break;
1495
1496        case Primitive::kPrimLong:
1497          // Processing a Dex `long-to-float' instruction.
1498          locations->SetInAt(0, Location::RequiresRegister());
1499          locations->SetOut(Location::RequiresFpuRegister());
1500          locations->AddTemp(Location::RequiresRegister());
1501          locations->AddTemp(Location::RequiresRegister());
1502          locations->AddTemp(Location::RequiresFpuRegister());
1503          locations->AddTemp(Location::RequiresFpuRegister());
1504          break;
1505
1506        case Primitive::kPrimDouble:
1507          // Processing a Dex `double-to-float' instruction.
1508          locations->SetInAt(0, Location::RequiresFpuRegister());
1509          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1510          break;
1511
1512        default:
1513          LOG(FATAL) << "Unexpected type conversion from " << input_type
1514                     << " to " << result_type;
1515      }
1516      break;
1517
1518    case Primitive::kPrimDouble:
1519      switch (input_type) {
1520        case Primitive::kPrimByte:
1521        case Primitive::kPrimShort:
1522        case Primitive::kPrimInt:
1523        case Primitive::kPrimChar:
1524          // Processing a Dex `int-to-double' instruction.
1525          locations->SetInAt(0, Location::RequiresRegister());
1526          locations->SetOut(Location::RequiresFpuRegister());
1527          break;
1528
1529        case Primitive::kPrimLong:
1530          // Processing a Dex `long-to-double' instruction.
1531          locations->SetInAt(0, Location::RequiresRegister());
1532          locations->SetOut(Location::RequiresFpuRegister());
1533          locations->AddTemp(Location::RequiresRegister());
1534          locations->AddTemp(Location::RequiresRegister());
1535          locations->AddTemp(Location::RequiresFpuRegister());
1536          break;
1537
1538        case Primitive::kPrimFloat:
1539          // Processing a Dex `float-to-double' instruction.
1540          locations->SetInAt(0, Location::RequiresFpuRegister());
1541          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1542          break;
1543
1544        default:
1545          LOG(FATAL) << "Unexpected type conversion from " << input_type
1546                     << " to " << result_type;
1547      }
1548      break;
1549
1550    default:
1551      LOG(FATAL) << "Unexpected type conversion from " << input_type
1552                 << " to " << result_type;
1553  }
1554}
1555
1556void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1557  LocationSummary* locations = conversion->GetLocations();
1558  Location out = locations->Out();
1559  Location in = locations->InAt(0);
1560  Primitive::Type result_type = conversion->GetResultType();
1561  Primitive::Type input_type = conversion->GetInputType();
1562  DCHECK_NE(result_type, input_type);
1563  switch (result_type) {
1564    case Primitive::kPrimByte:
1565      switch (input_type) {
1566        case Primitive::kPrimShort:
1567        case Primitive::kPrimInt:
1568        case Primitive::kPrimChar:
1569          // Processing a Dex `int-to-byte' instruction.
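          // sbfx sign-extends bits [7:0] of the input into the full 32-bit register.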
1570          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1571          break;
1572
1573        default:
1574          LOG(FATAL) << "Unexpected type conversion from " << input_type
1575                     << " to " << result_type;
1576      }
1577      break;
1578
1579    case Primitive::kPrimShort:
1580      switch (input_type) {
1581        case Primitive::kPrimByte:
1582        case Primitive::kPrimInt:
1583        case Primitive::kPrimChar:
1584          // Processing a Dex `int-to-short' instruction.
1585          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1586          break;
1587
1588        default:
1589          LOG(FATAL) << "Unexpected type conversion from " << input_type
1590                     << " to " << result_type;
1591      }
1592      break;
1593
1594    case Primitive::kPrimInt:
1595      switch (input_type) {
1596        case Primitive::kPrimLong:
1597          // Processing a Dex `long-to-int' instruction.
1598          DCHECK(out.IsRegister());
1599          if (in.IsRegisterPair()) {
1600            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1601          } else if (in.IsDoubleStackSlot()) {
1602            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1603          } else {
1604            DCHECK(in.IsConstant());
1605            DCHECK(in.GetConstant()->IsLongConstant());
1606            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1607            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1608          }
1609          break;
1610
1611        case Primitive::kPrimFloat: {
1612          // Processing a Dex `float-to-int' instruction.
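          // The vcvtis conversion rounds toward zero and saturates out-of-range
          // values (a NaN input becomes 0), which matches Java's float-to-int
          // semantics, so no extra fix-up code is needed here.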
1613          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1614          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1615          __ vcvtis(temp, temp);
1616          __ vmovrs(out.AsRegister<Register>(), temp);
1617          break;
1618        }
1619
1620        case Primitive::kPrimDouble: {
1621          // Processing a Dex `double-to-int' instruction.
1622          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1623          DRegister temp_d = FromLowSToD(temp_s);
1624          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1625          __ vcvtid(temp_s, temp_d);
1626          __ vmovrs(out.AsRegister<Register>(), temp_s);
1627          break;
1628        }
1629
1630        default:
1631          LOG(FATAL) << "Unexpected type conversion from " << input_type
1632                     << " to " << result_type;
1633      }
1634      break;
1635
1636    case Primitive::kPrimLong:
1637      switch (input_type) {
1638        case Primitive::kPrimByte:
1639        case Primitive::kPrimShort:
1640        case Primitive::kPrimInt:
1641        case Primitive::kPrimChar:
1642          // Processing a Dex `int-to-long' instruction.
1643          DCHECK(out.IsRegisterPair());
1644          DCHECK(in.IsRegister());
1645          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1646          // Sign extension.
1647          __ Asr(out.AsRegisterPairHigh<Register>(),
1648                 out.AsRegisterPairLow<Register>(),
1649                 31);
1650          break;
1651
1652        case Primitive::kPrimFloat:
1653          // Processing a Dex `float-to-long' instruction.
1654          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1655                                  conversion,
1656                                  conversion->GetDexPc());
1657          break;
1658
1659        case Primitive::kPrimDouble:
1660          // Processing a Dex `double-to-long' instruction.
1661          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1662                                  conversion,
1663                                  conversion->GetDexPc());
1664          break;
1665
1666        default:
1667          LOG(FATAL) << "Unexpected type conversion from " << input_type
1668                     << " to " << result_type;
1669      }
1670      break;
1671
1672    case Primitive::kPrimChar:
1673      switch (input_type) {
1674        case Primitive::kPrimByte:
1675        case Primitive::kPrimShort:
1676        case Primitive::kPrimInt:
1677          // Processing a Dex `int-to-char' instruction.
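          // ubfx zero-extends bits [15:0], matching char's unsigned 16-bit range.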
1678          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1679          break;
1680
1681        default:
1682          LOG(FATAL) << "Unexpected type conversion from " << input_type
1683                     << " to " << result_type;
1684      }
1685      break;
1686
1687    case Primitive::kPrimFloat:
1688      switch (input_type) {
1689        case Primitive::kPrimByte:
1690        case Primitive::kPrimShort:
1691        case Primitive::kPrimInt:
1692        case Primitive::kPrimChar: {
1693          // Processing a Dex `int-to-float' instruction.
1694          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1695          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1696          break;
1697        }
1698
1699        case Primitive::kPrimLong: {
1700          // Processing a Dex `long-to-float' instruction.
1701          Register low = in.AsRegisterPairLow<Register>();
1702          Register high = in.AsRegisterPairHigh<Register>();
1703          SRegister output = out.AsFpuRegister<SRegister>();
1704          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1705          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1706          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1707          DRegister temp1_d = FromLowSToD(temp1_s);
1708          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1709          DRegister temp2_d = FromLowSToD(temp2_s);
1710
1711          // Operations use doubles for precision reasons (each 32-bit
1712          // half of a long fits in the 53-bit mantissa of a double,
1713          // but not in the 24-bit mantissa of a float).  This is
1714          // especially important for the low bits.  The result is
1715          // eventually converted to float.
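          // For example, a low half of 0xFFFFFFFF converted directly to float
          // would round up to 2^32, losing its low bits before the high half is
          // even added; computing in doubles keeps both halves exact and rounds
          // only once, in the final double-to-float conversion.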
1716
1717          // temp1_d = int-to-double(high)
1718          __ vmovsr(temp1_s, high);
1719          __ vcvtdi(temp1_d, temp1_s);
1720          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1721          // as an immediate value into `temp2_d` does not work, as
1722          // this instruction only transfers 8 significant bits of its
1723          // immediate operand.  Instead, use two 32-bit core
1724          // registers to load `k2Pow32EncodingForDouble` into
1725          // `temp2_d`.
1726          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1727          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1728          __ vmovdrr(temp2_d, constant_low, constant_high);
1729          // temp1_d = temp1_d * 2^32
1730          __ vmuld(temp1_d, temp1_d, temp2_d);
1731          // temp2_d = unsigned-to-double(low)
1732          __ vmovsr(temp2_s, low);
1733          __ vcvtdu(temp2_d, temp2_s);
1734          // temp1_d = temp1_d + temp2_d
1735          __ vaddd(temp1_d, temp1_d, temp2_d);
1736          // output = double-to-float(temp1_d);
1737          __ vcvtsd(output, temp1_d);
1738          break;
1739        }
1740
1741        case Primitive::kPrimDouble:
1742          // Processing a Dex `double-to-float' instruction.
1743          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1744                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1745          break;
1746
1747        default:
1748          LOG(FATAL) << "Unexpected type conversion from " << input_type
1749                     << " to " << result_type;
1750      }
1751      break;
1752
1753    case Primitive::kPrimDouble:
1754      switch (input_type) {
1755        case Primitive::kPrimByte:
1756        case Primitive::kPrimShort:
1757        case Primitive::kPrimInt:
1758        case Primitive::kPrimChar: {
1759          // Processing a Dex `int-to-double' instruction.
1760          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1761          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1762                    out.AsFpuRegisterPairLow<SRegister>());
1763          break;
1764        }
1765
1766        case Primitive::kPrimLong: {
1767          // Processing a Dex `long-to-double' instruction.
1768          Register low = in.AsRegisterPairLow<Register>();
1769          Register high = in.AsRegisterPairHigh<Register>();
1770          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1771          DRegister out_d = FromLowSToD(out_s);
1772          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1773          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1774          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1775          DRegister temp_d = FromLowSToD(temp_s);
1776
1777          // out_d = int-to-double(high)
1778          __ vmovsr(out_s, high);
1779          __ vcvtdi(out_d, out_s);
1780          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1781          // as an immediate value into `temp_d` does not work, as
1782          // this instruction only transfers 8 significant bits of its
1783          // immediate operand.  Instead, use two 32-bit core
1784          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1785          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1786          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1787          __ vmovdrr(temp_d, constant_low, constant_high);
1788          // out_d = out_d * 2^32
1789          __ vmuld(out_d, out_d, temp_d);
1790          // temp_d = unsigned-to-double(low)
1791          __ vmovsr(temp_s, low);
1792          __ vcvtdu(temp_d, temp_s);
1793          // out_d = out_d + temp_d
1794          __ vaddd(out_d, out_d, temp_d);
1795          break;
1796        }
1797
1798        case Primitive::kPrimFloat:
1799          // Processing a Dex `float-to-double' instruction.
1800          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1801                    in.AsFpuRegister<SRegister>());
1802          break;
1803
1804        default:
1805          LOG(FATAL) << "Unexpected type conversion from " << input_type
1806                     << " to " << result_type;
1807      }
1808      break;
1809
1810    default:
1811      LOG(FATAL) << "Unexpected type conversion from " << input_type
1812                 << " to " << result_type;
1813  }
1814}
1815
1816void LocationsBuilderARM::VisitAdd(HAdd* add) {
1817  LocationSummary* locations =
1818      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1819  switch (add->GetResultType()) {
1820    case Primitive::kPrimInt:
1821    case Primitive::kPrimLong: {
1822      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1823      locations->SetInAt(0, Location::RequiresRegister());
1824      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1825      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1826      break;
1827    }
1828
1829    case Primitive::kPrimFloat:
1830    case Primitive::kPrimDouble: {
1831      locations->SetInAt(0, Location::RequiresFpuRegister());
1832      locations->SetInAt(1, Location::RequiresFpuRegister());
1833      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1834      break;
1835    }
1836
1837    default:
1838      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1839  }
1840}
1841
1842void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1843  LocationSummary* locations = add->GetLocations();
1844  Location out = locations->Out();
1845  Location first = locations->InAt(0);
1846  Location second = locations->InAt(1);
1847  switch (add->GetResultType()) {
1848    case Primitive::kPrimInt:
1849      if (second.IsRegister()) {
1850        __ add(out.AsRegister<Register>(),
1851               first.AsRegister<Register>(),
1852               ShifterOperand(second.AsRegister<Register>()));
1853      } else {
1854        __ AddConstant(out.AsRegister<Register>(),
1855                       first.AsRegister<Register>(),
1856                       second.GetConstant()->AsIntConstant()->GetValue());
1857      }
1858      break;
1859
1860    case Primitive::kPrimLong:
1861      __ adds(out.AsRegisterPairLow<Register>(),
1862              first.AsRegisterPairLow<Register>(),
1863              ShifterOperand(second.AsRegisterPairLow<Register>()));
1864      __ adc(out.AsRegisterPairHigh<Register>(),
1865             first.AsRegisterPairHigh<Register>(),
1866             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1867      break;
1868
1869    case Primitive::kPrimFloat:
1870      __ vadds(out.AsFpuRegister<SRegister>(),
1871               first.AsFpuRegister<SRegister>(),
1872               second.AsFpuRegister<SRegister>());
1873      break;
1874
1875    case Primitive::kPrimDouble:
1876      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1877               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1878               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1879      break;
1880
1881    default:
1882      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1883  }
1884}
1885
1886void LocationsBuilderARM::VisitSub(HSub* sub) {
1887  LocationSummary* locations =
1888      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1889  switch (sub->GetResultType()) {
1890    case Primitive::kPrimInt:
1891    case Primitive::kPrimLong: {
1892      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1893      locations->SetInAt(0, Location::RequiresRegister());
1894      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1895      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1896      break;
1897    }
1898    case Primitive::kPrimFloat:
1899    case Primitive::kPrimDouble: {
1900      locations->SetInAt(0, Location::RequiresFpuRegister());
1901      locations->SetInAt(1, Location::RequiresFpuRegister());
1902      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1903      break;
1904    }
1905    default:
1906      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1907  }
1908}
1909
1910void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1911  LocationSummary* locations = sub->GetLocations();
1912  Location out = locations->Out();
1913  Location first = locations->InAt(0);
1914  Location second = locations->InAt(1);
1915  switch (sub->GetResultType()) {
1916    case Primitive::kPrimInt: {
1917      if (second.IsRegister()) {
1918        __ sub(out.AsRegister<Register>(),
1919               first.AsRegister<Register>(),
1920               ShifterOperand(second.AsRegister<Register>()));
1921      } else {
1922        __ AddConstant(out.AsRegister<Register>(),
1923                       first.AsRegister<Register>(),
1924                       -second.GetConstant()->AsIntConstant()->GetValue());
1925      }
1926      break;
1927    }
1928
1929    case Primitive::kPrimLong: {
1930      __ subs(out.AsRegisterPairLow<Register>(),
1931              first.AsRegisterPairLow<Register>(),
1932              ShifterOperand(second.AsRegisterPairLow<Register>()));
1933      __ sbc(out.AsRegisterPairHigh<Register>(),
1934             first.AsRegisterPairHigh<Register>(),
1935             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1936      break;
1937    }
1938
1939    case Primitive::kPrimFloat: {
1940      __ vsubs(out.AsFpuRegister<SRegister>(),
1941               first.AsFpuRegister<SRegister>(),
1942               second.AsFpuRegister<SRegister>());
1943      break;
1944    }
1945
1946    case Primitive::kPrimDouble: {
1947      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1948               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1949               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1950      break;
1951    }
1952
1953
1954    default:
1955      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1956  }
1957}
1958
1959void LocationsBuilderARM::VisitMul(HMul* mul) {
1960  LocationSummary* locations =
1961      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1962  switch (mul->GetResultType()) {
1963    case Primitive::kPrimInt:
1964    case Primitive::kPrimLong:  {
1965      locations->SetInAt(0, Location::RequiresRegister());
1966      locations->SetInAt(1, Location::RequiresRegister());
1967      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1968      break;
1969    }
1970
1971    case Primitive::kPrimFloat:
1972    case Primitive::kPrimDouble: {
1973      locations->SetInAt(0, Location::RequiresFpuRegister());
1974      locations->SetInAt(1, Location::RequiresFpuRegister());
1975      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1976      break;
1977    }
1978
1979    default:
1980      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1981  }
1982}
1983
1984void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1985  LocationSummary* locations = mul->GetLocations();
1986  Location out = locations->Out();
1987  Location first = locations->InAt(0);
1988  Location second = locations->InAt(1);
1989  switch (mul->GetResultType()) {
1990    case Primitive::kPrimInt: {
1991      __ mul(out.AsRegister<Register>(),
1992             first.AsRegister<Register>(),
1993             second.AsRegister<Register>());
1994      break;
1995    }
1996    case Primitive::kPrimLong: {
1997      Register out_hi = out.AsRegisterPairHigh<Register>();
1998      Register out_lo = out.AsRegisterPairLow<Register>();
1999      Register in1_hi = first.AsRegisterPairHigh<Register>();
2000      Register in1_lo = first.AsRegisterPairLow<Register>();
2001      Register in2_hi = second.AsRegisterPairHigh<Register>();
2002      Register in2_lo = second.AsRegisterPairLow<Register>();
2003
2004      // Extra checks to protect against problems caused by the existence of the R1_R2 pair.
2005      // The algorithm is wrong if out.hi is either in1.lo or in2.lo
2006      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
2007      DCHECK_NE(out_hi, in1_lo);
2008      DCHECK_NE(out_hi, in2_lo);
2009
2010      // input: in1 - 64 bits, in2 - 64 bits
2011      // output: out
2012      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2013      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
2014      // parts: out.lo = (in1.lo * in2.lo)[31:0]
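      // Note: in1.hi * in2.hi would only contribute to bits [127:64], which are
      // discarded in the 64-bit result, so that product is never computed.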
2015
2016      // IP <- in1.lo * in2.hi
2017      __ mul(IP, in1_lo, in2_hi);
2018      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2019      __ mla(out_hi, in1_hi, in2_lo, IP);
2020      // out.lo <- (in1.lo * in2.lo)[31:0];
2021      __ umull(out_lo, IP, in1_lo, in2_lo);
2022      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2023      __ add(out_hi, out_hi, ShifterOperand(IP));
2024      break;
2025    }
2026
2027    case Primitive::kPrimFloat: {
2028      __ vmuls(out.AsFpuRegister<SRegister>(),
2029               first.AsFpuRegister<SRegister>(),
2030               second.AsFpuRegister<SRegister>());
2031      break;
2032    }
2033
2034    case Primitive::kPrimDouble: {
2035      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2036               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2037               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2038      break;
2039    }
2040
2041    default:
2042      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2043  }
2044}
2045
2046void LocationsBuilderARM::VisitDiv(HDiv* div) {
2047  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2048      ? LocationSummary::kCall
2049      : LocationSummary::kNoCall;
2050  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2051
2052  switch (div->GetResultType()) {
2053    case Primitive::kPrimInt: {
2054      locations->SetInAt(0, Location::RequiresRegister());
2055      locations->SetInAt(1, Location::RequiresRegister());
2056      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2057      break;
2058    }
2059    case Primitive::kPrimLong: {
2060      InvokeRuntimeCallingConvention calling_convention;
2061      locations->SetInAt(0, Location::RegisterPairLocation(
2062          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2063      locations->SetInAt(1, Location::RegisterPairLocation(
2064          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2065      // The runtime helper puts the output in R0,R2.
2066      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2067      break;
2068    }
2069    case Primitive::kPrimFloat:
2070    case Primitive::kPrimDouble: {
2071      locations->SetInAt(0, Location::RequiresFpuRegister());
2072      locations->SetInAt(1, Location::RequiresFpuRegister());
2073      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2074      break;
2075    }
2076
2077    default:
2078      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2079  }
2080}
2081
2082void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2083  LocationSummary* locations = div->GetLocations();
2084  Location out = locations->Out();
2085  Location first = locations->InAt(0);
2086  Location second = locations->InAt(1);
2087
2088  switch (div->GetResultType()) {
2089    case Primitive::kPrimInt: {
2090      __ sdiv(out.AsRegister<Register>(),
2091              first.AsRegister<Register>(),
2092              second.AsRegister<Register>());
2093      break;
2094    }
2095
2096    case Primitive::kPrimLong: {
2097      InvokeRuntimeCallingConvention calling_convention;
2098      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2099      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2100      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2101      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2102      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2103      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2104
2105      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2106      break;
2107    }
2108
2109    case Primitive::kPrimFloat: {
2110      __ vdivs(out.AsFpuRegister<SRegister>(),
2111               first.AsFpuRegister<SRegister>(),
2112               second.AsFpuRegister<SRegister>());
2113      break;
2114    }
2115
2116    case Primitive::kPrimDouble: {
2117      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2118               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2119               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2120      break;
2121    }
2122
2123    default:
2124      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2125  }
2126}
2127
2128void LocationsBuilderARM::VisitRem(HRem* rem) {
2129  Primitive::Type type = rem->GetResultType();
2130  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2131      ? LocationSummary::kNoCall
2132      : LocationSummary::kCall;
2133  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2134
2135  switch (type) {
2136    case Primitive::kPrimInt: {
2137      locations->SetInAt(0, Location::RequiresRegister());
2138      locations->SetInAt(1, Location::RequiresRegister());
2139      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2140      locations->AddTemp(Location::RequiresRegister());
2141      break;
2142    }
2143    case Primitive::kPrimLong: {
2144      InvokeRuntimeCallingConvention calling_convention;
2145      locations->SetInAt(0, Location::RegisterPairLocation(
2146          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2147      locations->SetInAt(1, Location::RegisterPairLocation(
2148          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2149      // The runtime helper puts the output in R2,R3.
2150      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2151      break;
2152    }
2153    case Primitive::kPrimFloat: {
2154      InvokeRuntimeCallingConvention calling_convention;
2155      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2156      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2157      locations->SetOut(Location::FpuRegisterLocation(S0));
2158      break;
2159    }
2160
2161    case Primitive::kPrimDouble: {
2162      InvokeRuntimeCallingConvention calling_convention;
2163      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2164          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2165      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2166          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2167      locations->SetOut(Location::FpuRegisterPairLocation(S0, S1));
2168      break;
2169    }
2170
2171    default:
2172      LOG(FATAL) << "Unexpected rem type " << type;
2173  }
2174}
2175
2176void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2177  LocationSummary* locations = rem->GetLocations();
2178  Location out = locations->Out();
2179  Location first = locations->InAt(0);
2180  Location second = locations->InAt(1);
2181
2182  Primitive::Type type = rem->GetResultType();
2183  switch (type) {
2184    case Primitive::kPrimInt: {
2185      Register reg1 = first.AsRegister<Register>();
2186      Register reg2 = second.AsRegister<Register>();
2187      Register temp = locations->GetTemp(0).AsRegister<Register>();
2188
2189      // temp = reg1 / reg2  (integer division)
2190      // temp = temp * reg2
2191      // dest = reg1 - temp
2192      __ sdiv(temp, reg1, reg2);
2193      __ mul(temp, temp, reg2);
2194      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2195      break;
2196    }
2197
2198    case Primitive::kPrimLong: {
2199      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2200      break;
2201    }
2202
2203    case Primitive::kPrimFloat: {
2204      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2205      break;
2206    }
2207
2208    case Primitive::kPrimDouble: {
2209      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2210      break;
2211    }
2212
2213    default:
2214      LOG(FATAL) << "Unexpected rem type " << type;
2215  }
2216}
2217
2218void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2219  LocationSummary* locations =
2220      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2221  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2222  if (instruction->HasUses()) {
2223    locations->SetOut(Location::SameAsFirstInput());
2224  }
2225}
2226
2227void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2228  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2229  codegen_->AddSlowPath(slow_path);
2230
2231  LocationSummary* locations = instruction->GetLocations();
2232  Location value = locations->InAt(0);
2233
2234  switch (instruction->GetType()) {
2235    case Primitive::kPrimInt: {
2236      if (value.IsRegister()) {
2237        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2238        __ b(slow_path->GetEntryLabel(), EQ);
2239      } else {
2240        DCHECK(value.IsConstant()) << value;
2241        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2242          __ b(slow_path->GetEntryLabel());
2243        }
2244      }
2245      break;
2246    }
2247    case Primitive::kPrimLong: {
2248      if (value.IsRegisterPair()) {
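        // ORing the two halves sets the Z flag only if the full 64-bit value is zero.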
2249        __ orrs(IP,
2250                value.AsRegisterPairLow<Register>(),
2251                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2252        __ b(slow_path->GetEntryLabel(), EQ);
2253      } else {
2254        DCHECK(value.IsConstant()) << value;
2255        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2256          __ b(slow_path->GetEntryLabel());
2257        }
2258      }
2259      break;
2260    }
2261    default:
2262      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2263  }
2264}
2265
2266void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2267  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2268
2269  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2270      ? LocationSummary::kCall
2271      : LocationSummary::kNoCall;
2272  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2273
2274  switch (op->GetResultType()) {
2275    case Primitive::kPrimInt: {
2276      locations->SetInAt(0, Location::RequiresRegister());
2277      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2278      locations->SetOut(Location::RequiresRegister());
2279      break;
2280    }
2281    case Primitive::kPrimLong: {
2282      InvokeRuntimeCallingConvention calling_convention;
2283      locations->SetInAt(0, Location::RegisterPairLocation(
2284          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2285      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2286      // The runtime helper puts the output in R0,R2.
2287      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2288      break;
2289    }
2290    default:
2291      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2292  }
2293}
2294
2295void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2296  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2297
2298  LocationSummary* locations = op->GetLocations();
2299  Location out = locations->Out();
2300  Location first = locations->InAt(0);
2301  Location second = locations->InAt(1);
2302
2303  Primitive::Type type = op->GetResultType();
2304  switch (type) {
2305    case Primitive::kPrimInt: {
2306      Register out_reg = out.AsRegister<Register>();
2307      Register first_reg = first.AsRegister<Register>();
2308      // ARM doesn't mask the shift count, so we need to do it ourselves.
2309      if (second.IsRegister()) {
2310        Register second_reg = second.AsRegister<Register>();
2311        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2312        if (op->IsShl()) {
2313          __ Lsl(out_reg, first_reg, second_reg);
2314        } else if (op->IsShr()) {
2315          __ Asr(out_reg, first_reg, second_reg);
2316        } else {
2317          __ Lsr(out_reg, first_reg, second_reg);
2318        }
2319      } else {
2320        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2321        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2322        if (shift_value == 0) {  // ARM does not support shifting with a 0 immediate.
2323          __ Mov(out_reg, first_reg);
2324        } else if (op->IsShl()) {
2325          __ Lsl(out_reg, first_reg, shift_value);
2326        } else if (op->IsShr()) {
2327          __ Asr(out_reg, first_reg, shift_value);
2328        } else {
2329          __ Lsr(out_reg, first_reg, shift_value);
2330        }
2331      }
2332      break;
2333    }
2334    case Primitive::kPrimLong: {
2335      // TODO: Inline the assembly instead of calling the runtime.
2336      InvokeRuntimeCallingConvention calling_convention;
2337      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2338      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2339      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
2340      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2341      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2342
2343      int32_t entry_point_offset;
2344      if (op->IsShl()) {
2345        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
2346      } else if (op->IsShr()) {
2347        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
2348      } else {
2349        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
2350      }
2351      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2352      __ blx(LR);
2353      break;
2354    }
2355    default:
2356      LOG(FATAL) << "Unexpected operation type " << type;
2357  }
2358}
2359
2360void LocationsBuilderARM::VisitShl(HShl* shl) {
2361  HandleShift(shl);
2362}
2363
2364void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
2365  HandleShift(shl);
2366}
2367
2368void LocationsBuilderARM::VisitShr(HShr* shr) {
2369  HandleShift(shr);
2370}
2371
2372void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
2373  HandleShift(shr);
2374}
2375
2376void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
2377  HandleShift(ushr);
2378}
2379
2380void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
2381  HandleShift(ushr);
2382}
2383
2384void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2385  LocationSummary* locations =
2386      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2387  InvokeRuntimeCallingConvention calling_convention;
2388  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2389  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2390  locations->SetOut(Location::RegisterLocation(R0));
2391}
2392
2393void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
2394  InvokeRuntimeCallingConvention calling_convention;
2395  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2396  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2397  codegen_->InvokeRuntime(
2398      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
2399}
2400
2401void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2402  LocationSummary* locations =
2403      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2404  InvokeRuntimeCallingConvention calling_convention;
2405  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2406  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2407  locations->SetOut(Location::RegisterLocation(R0));
2408  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2409}
2410
2411void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
2412  InvokeRuntimeCallingConvention calling_convention;
2413  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
2414  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2415  codegen_->InvokeRuntime(
2416      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
2417}
2418
2419void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2420  LocationSummary* locations =
2421      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2422  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
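  // Stack-passed parameters live in the caller's frame, so their slot indices are
  // biased by this method's frame size to address them from the current SP.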
2423  if (location.IsStackSlot()) {
2424    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2425  } else if (location.IsDoubleStackSlot()) {
2426    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2427  }
2428  locations->SetOut(location);
2429}
2430
2431void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
2432  // Nothing to do, the parameter is already at its location.
2433  UNUSED(instruction);
2434}
2435
2436void LocationsBuilderARM::VisitNot(HNot* not_) {
2437  LocationSummary* locations =
2438      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2439  locations->SetInAt(0, Location::RequiresRegister());
2440  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2441}
2442
2443void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2444  LocationSummary* locations = not_->GetLocations();
2445  Location out = locations->Out();
2446  Location in = locations->InAt(0);
2447  switch (not_->InputAt(0)->GetType()) {
2448    case Primitive::kPrimBoolean:
2449      __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
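      // Booleans are stored as 0 or 1, so XOR-ing with 1 implements logical not.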
2450      break;
2451
2452    case Primitive::kPrimInt:
2453      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2454      break;
2455
2456    case Primitive::kPrimLong:
2457      __ mvn(out.AsRegisterPairLow<Register>(),
2458             ShifterOperand(in.AsRegisterPairLow<Register>()));
2459      __ mvn(out.AsRegisterPairHigh<Register>(),
2460             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2461      break;
2462
2463    default:
2464      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2465  }
2466}
2467
2468void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2469  LocationSummary* locations =
2470      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2471  switch (compare->InputAt(0)->GetType()) {
2472    case Primitive::kPrimLong: {
2473      locations->SetInAt(0, Location::RequiresRegister());
2474      locations->SetInAt(1, Location::RequiresRegister());
2475      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2476      break;
2477    }
2478    case Primitive::kPrimFloat:
2479    case Primitive::kPrimDouble: {
2480      locations->SetInAt(0, Location::RequiresFpuRegister());
2481      locations->SetInAt(1, Location::RequiresFpuRegister());
2482      locations->SetOut(Location::RequiresRegister());
2483      break;
2484    }
2485    default:
2486      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2487  }
2488}
2489
2490void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
2491  LocationSummary* locations = compare->GetLocations();
2492  Register out = locations->Out().AsRegister<Register>();
2493  Location left = locations->InAt(0);
2494  Location right = locations->InAt(1);
2495
2496  Label less, greater, done;
2497  Primitive::Type type = compare->InputAt(0)->GetType();
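  // The result is -1, 0 or 1.  For float/double, an unordered comparison (a NaN
  // operand) resolves to 1 for a gt-bias compare and -1 for an lt-bias compare.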
2498  switch (type) {
2499    case Primitive::kPrimLong: {
2500      __ cmp(left.AsRegisterPairHigh<Register>(),
2501             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
2502      __ b(&less, LT);
2503      __ b(&greater, GT);
2504      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
2505      __ LoadImmediate(out, 0);
2506      __ cmp(left.AsRegisterPairLow<Register>(),
2507             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
2508      break;
2509    }
2510    case Primitive::kPrimFloat:
2511    case Primitive::kPrimDouble: {
2512      __ LoadImmediate(out, 0);
2513      if (type == Primitive::kPrimFloat) {
2514        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
2515      } else {
2516        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
2517                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
2518      }
2519      __ vmstat();  // transfer FP status register to ARM APSR.
2520      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
2521      break;
2522    }
2523    default:
2524      LOG(FATAL) << "Unexpected compare type " << type;
2525  }
2526  __ b(&done, EQ);
2527  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.
2528
2529  __ Bind(&greater);
2530  __ LoadImmediate(out, 1);
2531  __ b(&done);
2532
2533  __ Bind(&less);
2534  __ LoadImmediate(out, -1);
2535
2536  __ Bind(&done);
2537}
2538
2539void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2540  LocationSummary* locations =
2541      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2542  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2543    locations->SetInAt(i, Location::Any());
2544  }
2545  locations->SetOut(Location::Any());
2546}
2547
2548void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
2549  UNUSED(instruction);
2550  LOG(FATAL) << "Unreachable";
2551}
2552
2553void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2554  // TODO (ported from quick): revisit Arm barrier kinds
2555  DmbOptions flavour = DmbOptions::ISH;  // Quiet C++ warnings about an uninitialized variable.
2556  switch (kind) {
2557    case MemBarrierKind::kAnyStore:
2558    case MemBarrierKind::kLoadAny:
2559    case MemBarrierKind::kAnyAny: {
2560      flavour = DmbOptions::ISH;
2561      break;
2562    }
2563    case MemBarrierKind::kStoreStore: {
2564      flavour = DmbOptions::ISHST;
2565      break;
2566    }
2567    default:
2568      LOG(FATAL) << "Unexpected memory barrier " << kind;
2569  }
2570  __ dmb(flavour);
2571}
2572
2573void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
2574                                                         uint32_t offset,
2575                                                         Register out_lo,
2576                                                         Register out_hi) {
2577  if (offset != 0) {
2578    __ LoadImmediate(out_lo, offset);
2579    __ add(IP, addr, ShifterOperand(out_lo));
2580    addr = IP;
2581  }
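  // On ARMv7, LDREXD of a doubleword is single-copy atomic, which is what makes
  // this suitable for volatile 64-bit loads.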
2582  __ ldrexd(out_lo, out_hi, addr);
2583}
2584
2585void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
2586                                                          uint32_t offset,
2587                                                          Register value_lo,
2588                                                          Register value_hi,
2589                                                          Register temp1,
2590                                                          Register temp2) {
2591  Label fail;
2592  if (offset != 0) {
2593    __ LoadImmediate(temp1, offset);
2594    __ add(IP, addr, ShifterOperand(temp1));
2595    addr = IP;
2596  }
2597  __ Bind(&fail);
2598  // We need a load followed by a store. (The address used in a STREX instruction must
2599  // be the same as the address in the most recently executed LDREX instruction.)
2600  __ ldrexd(temp1, temp2, addr);
2601  __ strexd(temp1, value_lo, value_hi, addr);
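  // STREXD writes 0 to temp1 on success and 1 if exclusive access was lost, in
  // which case the LDREXD/STREXD sequence is retried.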
2602  __ cmp(temp1, ShifterOperand(0));
2603  __ b(&fail, NE);
2604}
2605
2606void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2607  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2608
2609  LocationSummary* locations =
2610      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2611  locations->SetInAt(0, Location::RequiresRegister());
2612  locations->SetInAt(1, Location::RequiresRegister());
2613
2614
2615  Primitive::Type field_type = field_info.GetFieldType();
2616  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2617  bool generate_volatile = field_info.IsVolatile()
2618      && is_wide
2619      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2620  // Temporary registers for the write barrier.
2621  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2622  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2623    locations->AddTemp(Location::RequiresRegister());
2624    locations->AddTemp(Location::RequiresRegister());
2625  } else if (generate_volatile) {
2626    // The ARM encoding has some additional constraints for ldrexd/strexd:
2627    // - registers need to be consecutive
2628    // - the first register should be even but not R14.
2629    // We don't test the ARM encoding yet, and the assertion makes sure that we revisit this
2630    // if we ever enable it.
2631    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2632
2633    locations->AddTemp(Location::RequiresRegister());
2634    locations->AddTemp(Location::RequiresRegister());
2635    if (field_type == Primitive::kPrimDouble) {
2636      // For doubles we need two more registers to copy the value.
2637      locations->AddTemp(Location::RegisterLocation(R2));
2638      locations->AddTemp(Location::RegisterLocation(R3));
2639    }
2640  }
2641}
2642
2643void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
2644                                                 const FieldInfo& field_info) {
2645  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2646
2647  LocationSummary* locations = instruction->GetLocations();
2648  Register base = locations->InAt(0).AsRegister<Register>();
2649  Location value = locations->InAt(1);
2650
2651  bool is_volatile = field_info.IsVolatile();
2652  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2653  Primitive::Type field_type = field_info.GetFieldType();
2654  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2655
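  // A volatile store is bracketed by barriers: an any-store barrier before it so
  // earlier accesses cannot be reordered past the store, and an any-any barrier
  // after it so the store cannot be reordered past later accesses.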
2656  if (is_volatile) {
2657    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
2658  }
2659
2660  switch (field_type) {
2661    case Primitive::kPrimBoolean:
2662    case Primitive::kPrimByte: {
2663      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
2664      break;
2665    }
2666
2667    case Primitive::kPrimShort:
2668    case Primitive::kPrimChar: {
2669      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
2670      break;
2671    }
2672
2673    case Primitive::kPrimInt:
2674    case Primitive::kPrimNot: {
2675      Register value_reg = value.AsRegister<Register>();
2676      __ StoreToOffset(kStoreWord, value_reg, base, offset);
2677      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2678        Register temp = locations->GetTemp(0).AsRegister<Register>();
2679        Register card = locations->GetTemp(1).AsRegister<Register>();
2680        codegen_->MarkGCCard(temp, card, base, value_reg);
2681      }
2682      break;
2683    }
2684
2685    case Primitive::kPrimLong: {
2686      if (is_volatile && !atomic_ldrd_strd) {
2687        GenerateWideAtomicStore(base, offset,
2688                                value.AsRegisterPairLow<Register>(),
2689                                value.AsRegisterPairHigh<Register>(),
2690                                locations->GetTemp(0).AsRegister<Register>(),
2691                                locations->GetTemp(1).AsRegister<Register>());
2692      } else {
2693        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
2694      }
2695      break;
2696    }
2697
2698    case Primitive::kPrimFloat: {
2699      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
2700      break;
2701    }
2702
2703    case Primitive::kPrimDouble: {
2704      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
2705      if (is_volatile && !atomic_ldrd_strd) {
2706        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
2707        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();
2708
2709        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);
2710
2711        GenerateWideAtomicStore(base, offset,
2712                                value_reg_lo,
2713                                value_reg_hi,
2714                                locations->GetTemp(2).AsRegister<Register>(),
2715                                locations->GetTemp(3).AsRegister<Register>());
2716      } else {
2717        __ StoreDToOffset(value_reg, base, offset);
2718      }
2719      break;
2720    }
2721
2722    case Primitive::kPrimVoid:
2723      LOG(FATAL) << "Unreachable type " << field_type;
2724      UNREACHABLE();
2725  }
2726
2727  if (is_volatile) {
2728    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
2729  }
2730}
2731
2732void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2733  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2734  LocationSummary* locations =
2735      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2736  locations->SetInAt(0, Location::RequiresRegister());
2737  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2738
2739  bool generate_volatile = field_info.IsVolatile()
2740      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2741      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2742  if (generate_volatile) {
2743    // The ARM encoding has some additional constraints for ldrexd/strexd:
2744    // - registers need to be consecutive
2745    // - the first register should be even but not R14.
2746    // We don't test the ARM encoding yet, and the assertion makes sure that we revisit this
2747    // if we ever enable it.
2748    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2749    locations->AddTemp(Location::RequiresRegister());
2750    locations->AddTemp(Location::RequiresRegister());
2751  }
2752}
2753
2754void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
2755                                                 const FieldInfo& field_info) {
2756  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2757
2758  LocationSummary* locations = instruction->GetLocations();
2759  Register base = locations->InAt(0).AsRegister<Register>();
2760  Location out = locations->Out();
2761  bool is_volatile = field_info.IsVolatile();
2762  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2763  Primitive::Type field_type = field_info.GetFieldType();
2764  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2765
2766  switch (field_type) {
2767    case Primitive::kPrimBoolean: {
2768      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
2769      break;
2770    }
2771
2772    case Primitive::kPrimByte: {
2773      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
2774      break;
2775    }
2776
2777    case Primitive::kPrimShort: {
2778      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
2779      break;
2780    }
2781
2782    case Primitive::kPrimChar: {
2783      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
2784      break;
2785    }
2786
2787    case Primitive::kPrimInt:
2788    case Primitive::kPrimNot: {
2789      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
2790      break;
2791    }
2792
2793    case Primitive::kPrimLong: {
2794      if (is_volatile && !atomic_ldrd_strd) {
2795        GenerateWideAtomicLoad(base, offset,
2796                               out.AsRegisterPairLow<Register>(),
2797                               out.AsRegisterPairHigh<Register>());
2798      } else {
2799        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
2800      }
2801      break;
2802    }
2803
2804    case Primitive::kPrimFloat: {
2805      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
2806      break;
2807    }
2808
2809    case Primitive::kPrimDouble: {
2810      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
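      // For volatile doubles without atomic ldrd/strd, load the two words atomically into
      // core temps and assemble the D register with vmovdrr.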
2811      if (is_volatile && !atomic_ldrd_strd) {
2812        Register lo = locations->GetTemp(0).AsRegister<Register>();
2813        Register hi = locations->GetTemp(1).AsRegister<Register>();
2814        GenerateWideAtomicLoad(base, offset, lo, hi);
2815        __ vmovdrr(out_reg, lo, hi);
2816      } else {
2817        __ LoadDFromOffset(out_reg, base, offset);
2818      }
2819      break;
2820    }
2821
2822    case Primitive::kPrimVoid:
2823      LOG(FATAL) << "Unreachable type " << field_type;
2824      UNREACHABLE();
2825  }
2826
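  // A volatile load is followed by a LoadAny barrier, giving it acquire semantics.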
2827  if (is_volatile) {
2828    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
2829  }
2830}
2831
2832void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2833  HandleFieldSet(instruction, instruction->GetFieldInfo());
2834}
2835
2836void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2837  HandleFieldSet(instruction, instruction->GetFieldInfo());
2838}
2839
2840void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2841  HandleFieldGet(instruction, instruction->GetFieldInfo());
2842}
2843
2844void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2845  HandleFieldGet(instruction, instruction->GetFieldInfo());
2846}
2847
2848void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2849  HandleFieldGet(instruction, instruction->GetFieldInfo());
2850}
2851
2852void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2853  HandleFieldGet(instruction, instruction->GetFieldInfo());
2854}
2855
2856void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2857  HandleFieldSet(instruction, instruction->GetFieldInfo());
2858}
2859
2860void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2861  HandleFieldSet(instruction, instruction->GetFieldInfo());
2862}
2863
2864void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2865  LocationSummary* locations =
2866      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2867  locations->SetInAt(0, Location::RequiresRegister());
2868  if (instruction->HasUses()) {
2869    locations->SetOut(Location::SameAsFirstInput());
2870  }
2871}
2872
2873void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2874  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2875  codegen_->AddSlowPath(slow_path);
2876
2877  LocationSummary* locations = instruction->GetLocations();
2878  Location obj = locations->InAt(0);
2879
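  // A register input is compared against null; a constant input can only be the null
  // constant here, so the check throws unconditionally.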
2880  if (obj.IsRegister()) {
2881    __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2882    __ b(slow_path->GetEntryLabel(), EQ);
2883  } else {
2884    DCHECK(obj.IsConstant()) << obj;
2885    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2886    __ b(slow_path->GetEntryLabel());
2887  }
2888}
2889
2890void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2891  LocationSummary* locations =
2892      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2893  locations->SetInAt(0, Location::RequiresRegister());
2894  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2895  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2896}
2897
2898void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2899  LocationSummary* locations = instruction->GetLocations();
2900  Register obj = locations->InAt(0).AsRegister<Register>();
2901  Location index = locations->InAt(1);
2902
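  // For every element type: a constant index is folded into the load offset, otherwise the
  // scaled index is added to the base in IP and the load uses the plain data offset.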
2903  switch (instruction->GetType()) {
2904    case Primitive::kPrimBoolean: {
2905      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2906      Register out = locations->Out().AsRegister<Register>();
2907      if (index.IsConstant()) {
2908        size_t offset =
2909            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2910        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2911      } else {
2912        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2913        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2914      }
2915      break;
2916    }
2917
2918    case Primitive::kPrimByte: {
2919      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2920      Register out = locations->Out().AsRegister<Register>();
2921      if (index.IsConstant()) {
2922        size_t offset =
2923            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2924        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2925      } else {
2926        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2927        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2928      }
2929      break;
2930    }
2931
2932    case Primitive::kPrimShort: {
2933      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2934      Register out = locations->Out().AsRegister<Register>();
2935      if (index.IsConstant()) {
2936        size_t offset =
2937            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2938        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2939      } else {
2940        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2941        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2942      }
2943      break;
2944    }
2945
2946    case Primitive::kPrimChar: {
2947      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2948      Register out = locations->Out().AsRegister<Register>();
2949      if (index.IsConstant()) {
2950        size_t offset =
2951            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2952        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2953      } else {
2954        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2955        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2956      }
2957      break;
2958    }
2959
2960    case Primitive::kPrimInt:
2961    case Primitive::kPrimNot: {
2962      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2963      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2964      Register out = locations->Out().AsRegister<Register>();
2965      if (index.IsConstant()) {
2966        size_t offset =
2967            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2968        __ LoadFromOffset(kLoadWord, out, obj, offset);
2969      } else {
2970        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
2971        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2972      }
2973      break;
2974    }
2975
2976    case Primitive::kPrimLong: {
2977      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2978      Location out = locations->Out();
2979      if (index.IsConstant()) {
2980        size_t offset =
2981            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2982        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2983      } else {
2984        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
2985        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2986      }
2987      break;
2988    }
2989
2990    case Primitive::kPrimFloat: {
2991      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
2992      Location out = locations->Out();
2993      DCHECK(out.IsFpuRegister());
2994      if (index.IsConstant()) {
2995        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2996        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
2997      } else {
2998        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
2999        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
3000      }
3001      break;
3002    }
3003
3004    case Primitive::kPrimDouble: {
3005      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3006      Location out = locations->Out();
3007      DCHECK(out.IsFpuRegisterPair());
3008      if (index.IsConstant()) {
3009        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3010        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3011      } else {
3012        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3013        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3014      }
3015      break;
3016    }
3017
3018    case Primitive::kPrimVoid:
3019      LOG(FATAL) << "Unreachable type " << instruction->GetType();
3020      UNREACHABLE();
3021  }
3022}
3023
3024void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3025  Primitive::Type value_type = instruction->GetComponentType();
3026
3027  bool needs_write_barrier =
3028      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3029  bool needs_runtime_call = instruction->NeedsTypeCheck();
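  // Object stores that need a type check go through the pAputObject entrypoint, so their
  // inputs must live in the runtime calling convention registers.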
3030
3031  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3032      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3033  if (needs_runtime_call) {
3034    InvokeRuntimeCallingConvention calling_convention;
3035    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3036    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3037    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3038  } else {
3039    locations->SetInAt(0, Location::RequiresRegister());
3040    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3041    locations->SetInAt(2, Location::RequiresRegister());
3042
3043    if (needs_write_barrier) {
3044      // Temporary registers for the write barrier.
3045      locations->AddTemp(Location::RequiresRegister());
3046      locations->AddTemp(Location::RequiresRegister());
3047    }
3048  }
3049}
3050
3051void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
3052  LocationSummary* locations = instruction->GetLocations();
3053  Register obj = locations->InAt(0).AsRegister<Register>();
3054  Location index = locations->InAt(1);
3055  Primitive::Type value_type = instruction->GetComponentType();
3056  bool needs_runtime_call = locations->WillCall();
3057  bool needs_write_barrier =
3058      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3059
3060  switch (value_type) {
3061    case Primitive::kPrimBoolean:
3062    case Primitive::kPrimByte: {
3063      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
3064      Register value = locations->InAt(2).AsRegister<Register>();
3065      if (index.IsConstant()) {
3066        size_t offset =
3067            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
3068        __ StoreToOffset(kStoreByte, value, obj, offset);
3069      } else {
3070        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
3071        __ StoreToOffset(kStoreByte, value, IP, data_offset);
3072      }
3073      break;
3074    }
3075
3076    case Primitive::kPrimShort:
3077    case Primitive::kPrimChar: {
3078      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
3079      Register value = locations->InAt(2).AsRegister<Register>();
3080      if (index.IsConstant()) {
3081        size_t offset =
3082            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
3083        __ StoreToOffset(kStoreHalfword, value, obj, offset);
3084      } else {
3085        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
3086        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
3087      }
3088      break;
3089    }
3090
3091    case Primitive::kPrimInt:
3092    case Primitive::kPrimNot: {
3093      if (!needs_runtime_call) {
3094        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
3095        Register value = locations->InAt(2).AsRegister<Register>();
3096        if (index.IsConstant()) {
3097          size_t offset =
3098              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3099          __ StoreToOffset(kStoreWord, value, obj, offset);
3100        } else {
3101          DCHECK(index.IsRegister()) << index;
3102          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3103          __ StoreToOffset(kStoreWord, value, IP, data_offset);
3104        }
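        // A reference store also dirties the card for `obj` so the GC notices the new reference.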
3105        if (needs_write_barrier) {
3106          DCHECK_EQ(value_type, Primitive::kPrimNot);
3107          Register temp = locations->GetTemp(0).AsRegister<Register>();
3108          Register card = locations->GetTemp(1).AsRegister<Register>();
3109          codegen_->MarkGCCard(temp, card, obj, value);
3110        }
3111      } else {
3112        DCHECK_EQ(value_type, Primitive::kPrimNot);
3113        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
3114                                instruction,
3115                                instruction->GetDexPc());
3116      }
3117      break;
3118    }
3119
3120    case Primitive::kPrimLong: {
3121      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3122      Location value = locations->InAt(2);
3123      if (index.IsConstant()) {
3124        size_t offset =
3125            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3126        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
3127      } else {
3128        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3129        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
3130      }
3131      break;
3132    }
3133
3134    case Primitive::kPrimFloat: {
3135      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3136      Location value = locations->InAt(2);
3137      DCHECK(value.IsFpuRegister());
3138      if (index.IsConstant()) {
3139        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3140        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
3141      } else {
3142        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3143        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
3144      }
3145      break;
3146    }
3147
3148    case Primitive::kPrimDouble: {
3149      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3150      Location value = locations->InAt(2);
3151      DCHECK(value.IsFpuRegisterPair());
3152      if (index.IsConstant()) {
3153        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3154        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3155      } else {
3156        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3157        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3158      }
3159      break;
3160    }
3161
3162    case Primitive::kPrimVoid:
3163      LOG(FATAL) << "Unreachable type " << value_type;
3164      UNREACHABLE();
3165  }
3166}
3167
3168void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3169  LocationSummary* locations =
3170      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3171  locations->SetInAt(0, Location::RequiresRegister());
3172  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3173}
3174
3175void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3176  LocationSummary* locations = instruction->GetLocations();
3177  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3178  Register obj = locations->InAt(0).AsRegister<Register>();
3179  Register out = locations->Out().AsRegister<Register>();
3180  __ LoadFromOffset(kLoadWord, out, obj, offset);
3181}
3182
3183void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3184  LocationSummary* locations =
3185      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3186  locations->SetInAt(0, Location::RequiresRegister());
3187  locations->SetInAt(1, Location::RequiresRegister());
3188  if (instruction->HasUses()) {
3189    locations->SetOut(Location::SameAsFirstInput());
3190  }
3191}
3192
3193void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3194  LocationSummary* locations = instruction->GetLocations();
3195  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3196      instruction, locations->InAt(0), locations->InAt(1));
3197  codegen_->AddSlowPath(slow_path);
3198
3199  Register index = locations->InAt(0).AsRegister<Register>();
3200  Register length = locations->InAt(1).AsRegister<Register>();
3201
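  // One unsigned comparison covers both failure cases: index >= length and a negative index,
  // which wraps to a large unsigned value (hence the CS condition).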
3202  __ cmp(index, ShifterOperand(length));
3203  __ b(slow_path->GetEntryLabel(), CS);
3204}
3205
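// Marks the card covering `object` in the card table (its base is cached in the Thread);
// the store is skipped when the written `value` is null.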
3206void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
3207  Label is_null;
3208  __ CompareAndBranchIfZero(value, &is_null);
3209  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
3210  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
3211  __ strb(card, Address(card, temp));
3212  __ Bind(&is_null);
3213}
3214
3215void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3216  temp->SetLocations(nullptr);
3217}
3218
3219void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3220  // Nothing to do, this is driven by the code generator.
3221  UNUSED(temp);
3222}
3223
3224void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3225  UNUSED(instruction);
3226  LOG(FATAL) << "Unreachable";
3227}
3228
3229void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
3230  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3231}
3232
3233void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3234  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3235}
3236
3237void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3238  HBasicBlock* block = instruction->GetBlock();
3239  if (block->GetLoopInformation() != nullptr) {
3240    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3241    // The back edge will generate the suspend check.
3242    return;
3243  }
3244  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3245    // The goto will generate the suspend check.
3246    return;
3247  }
3248  GenerateSuspendCheck(instruction, nullptr);
3249}
3250
3251void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
3252                                                       HBasicBlock* successor) {
3253  SuspendCheckSlowPathARM* slow_path =
3254      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
3255  codegen_->AddSlowPath(slow_path);
3256
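  // Test the thread's flags halfword: any pending suspend/checkpoint request sends execution
  // to the slow path; with an explicit successor, the clear case branches there instead of
  // falling through.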
3257  __ LoadFromOffset(
3258      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
3259  __ cmp(IP, ShifterOperand(0));
3260  // TODO: Figure out the branch offsets and use cbz/cbnz.
3261  if (successor == nullptr) {
3262    __ b(slow_path->GetEntryLabel(), NE);
3263    __ Bind(slow_path->GetReturnLabel());
3264  } else {
3265    __ b(codegen_->GetLabelOf(successor), EQ);
3266    __ b(slow_path->GetEntryLabel());
3267  }
3268}
3269
3270ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3271  return codegen_->GetAssembler();
3272}
3273
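// Emits one move of a parallel move, handling core registers, FPU registers and pairs,
// stack slots, double stack slots, and int/float constants as sources or destinations.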
3274void ParallelMoveResolverARM::EmitMove(size_t index) {
3275  MoveOperands* move = moves_.Get(index);
3276  Location source = move->GetSource();
3277  Location destination = move->GetDestination();
3278
3279  if (source.IsRegister()) {
3280    if (destination.IsRegister()) {
3281      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
3282    } else {
3283      DCHECK(destination.IsStackSlot());
3284      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
3285                       SP, destination.GetStackIndex());
3286    }
3287  } else if (source.IsStackSlot()) {
3288    if (destination.IsRegister()) {
3289      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
3290                        SP, source.GetStackIndex());
3291    } else if (destination.IsFpuRegister()) {
3292      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
3293    } else {
3294      DCHECK(destination.IsStackSlot());
3295      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3296      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3297    }
3298  } else if (source.IsFpuRegister()) {
3299    if (destination.IsFpuRegister()) {
3300      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
3301    } else {
3302      DCHECK(destination.IsStackSlot());
3303      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
3304    }
3305  } else if (source.IsFpuRegisterPair()) {
3306    if (destination.IsFpuRegisterPair()) {
3307      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
3308               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
3309    } else {
3310      DCHECK(destination.IsDoubleStackSlot()) << destination;
3311      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
3312                        SP, destination.GetStackIndex());
3313    }
3314  } else if (source.IsDoubleStackSlot()) {
3315    if (destination.IsFpuRegisterPair()) {
3316      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
3317                         SP, source.GetStackIndex());
3318    } else {
3319      DCHECK(destination.IsDoubleStackSlot()) << destination;
3320      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3321      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3322      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
3323      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3324    }
3325  } else {
3326    DCHECK(source.IsConstant()) << source;
3327    HInstruction* constant = source.GetConstant();
3328    if (constant->IsIntConstant()) {
3329      int32_t value = constant->AsIntConstant()->GetValue();
3330      if (destination.IsRegister()) {
3331        __ LoadImmediate(destination.AsRegister<Register>(), value);
3332      } else {
3333        DCHECK(destination.IsStackSlot());
3334        __ LoadImmediate(IP, value);
3335        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3336      }
3337    } else {
3338      DCHECK(constant->IsFloatConstant());
3339      float value = constant->AsFloatConstant()->GetValue();
3340      if (destination.IsFpuRegister()) {
3341        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
3342      } else {
3343        DCHECK(destination.IsStackSlot());
3344        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
3345        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3346      }
3347    }
3348  }
3349}
3350
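// Swaps a core register with a stack slot, using IP as the temporary.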
3351void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
3352  __ Mov(IP, reg);
3353  __ LoadFromOffset(kLoadWord, reg, SP, mem);
3354  __ StoreToOffset(kStoreWord, IP, SP, mem);
3355}
3356
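// Swaps two stack slots through IP plus one allocated scratch core register; if that scratch
// register had to be spilled, its push shifts both slot offsets by one word.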
3357void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
3358  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
3359  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
3360  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
3361                    SP, mem1 + stack_offset);
3362  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
3363  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
3364                   SP, mem2 + stack_offset);
3365  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
3366}
3367
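// Swaps the contents of two locations, using IP, STMP, or DTMP as the temporary register
// depending on the kinds of locations involved.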
3368void ParallelMoveResolverARM::EmitSwap(size_t index) {
3369  MoveOperands* move = moves_.Get(index);
3370  Location source = move->GetSource();
3371  Location destination = move->GetDestination();
3372
3373  if (source.IsRegister() && destination.IsRegister()) {
3374    DCHECK_NE(source.AsRegister<Register>(), IP);
3375    DCHECK_NE(destination.AsRegister<Register>(), IP);
3376    __ Mov(IP, source.AsRegister<Register>());
3377    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3378    __ Mov(destination.AsRegister<Register>(), IP);
3379  } else if (source.IsRegister() && destination.IsStackSlot()) {
3380    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3381  } else if (source.IsStackSlot() && destination.IsRegister()) {
3382    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3383  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3384    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3385  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3386    __ vmovs(STMP, source.AsFpuRegister<SRegister>());
3387    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3388    __ vmovs(destination.AsFpuRegister<SRegister>(), STMP);
3389  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3390    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3391                                           : destination.AsFpuRegister<SRegister>();
3392    int mem = source.IsFpuRegister()
3393        ? destination.GetStackIndex()
3394        : source.GetStackIndex();
3395
3396    __ vmovs(STMP, reg);
3397    __ LoadSFromOffset(reg, SP, mem);
3398    __ StoreSToOffset(STMP, SP, mem);
3399  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
3400    __ vmovd(DTMP, FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
3401    __ vmovd(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
3402             FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()));
3403    __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), DTMP);
3404  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
3405    DRegister reg = source.IsFpuRegisterPair()
3406        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
3407        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
3408    int mem = source.IsFpuRegisterPair()
3409        ? destination.GetStackIndex()
3410        : source.GetStackIndex();
3411
3412    __ vmovd(DTMP, reg);
3413    __ LoadDFromOffset(reg, SP, mem);
3414    __ StoreDToOffset(DTMP, SP, mem);
3415  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3416    // TODO: We could use DTMP and ask for a pair scratch register (float or core).
3417    // This would save four instructions if two scratch registers are available, and
3418    // two instructions if not.
3419    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3420    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3421  } else {
3422    LOG(FATAL) << "Unimplemented swap: " << source << " <-> " << destination;
3423  }
3424}
3425
3426void ParallelMoveResolverARM::SpillScratch(int reg) {
3427  __ Push(static_cast<Register>(reg));
3428}
3429
3430void ParallelMoveResolverARM::RestoreScratch(int reg) {
3431  __ Pop(static_cast<Register>(reg));
3432}
3433
3434void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3435  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3436      ? LocationSummary::kCallOnSlowPath
3437      : LocationSummary::kNoCall;
3438  LocationSummary* locations =
3439      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3440  locations->SetOut(Location::RequiresRegister());
3441}
3442
3443void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
3444  Register out = cls->GetLocations()->Out().AsRegister<Register>();
3445  if (cls->IsReferrersClass()) {
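    // The referrer's class is the current method's declaring class; the DCHECKs below check
    // that no runtime call or clinit check was requested for it.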
3446    DCHECK(!cls->CanCallRuntime());
3447    DCHECK(!cls->MustGenerateClinitCheck());
3448    codegen_->LoadCurrentMethod(out);
3449    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3450  } else {
3451    DCHECK(cls->CanCallRuntime());
3452    codegen_->LoadCurrentMethod(out);
3453    __ LoadFromOffset(
3454        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
3455    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
3456
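    // A null dex cache entry means the type is unresolved; the slow path calls into the
    // runtime to resolve it (and run its class initializer when a clinit check is required).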
3457    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3458        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3459    codegen_->AddSlowPath(slow_path);
3460    __ cmp(out, ShifterOperand(0));
3461    __ b(slow_path->GetEntryLabel(), EQ);
3462    if (cls->MustGenerateClinitCheck()) {
3463      GenerateClassInitializationCheck(slow_path, out);
3464    } else {
3465      __ Bind(slow_path->GetExitLabel());
3466    }
3467  }
3468}
3469
3470void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3471  LocationSummary* locations =
3472      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3473  locations->SetInAt(0, Location::RequiresRegister());
3474  if (check->HasUses()) {
3475    locations->SetOut(Location::SameAsFirstInput());
3476  }
3477}
3478
3479void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3480  // We assume the class is not null.
3481  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3482      check->GetLoadClass(), check, check->GetDexPc(), true);
3483  codegen_->AddSlowPath(slow_path);
3484  GenerateClassInitializationCheck(slow_path,
3485                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3486}
3487
3488void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
3489    SlowPathCodeARM* slow_path, Register class_reg) {
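  // The class is treated as initialized when its status field is at least kStatusInitialized;
  // anything lower branches to the slow path, which calls into the runtime.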
3490  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
3491  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
3492  __ b(slow_path->GetEntryLabel(), LT);
3493  // Even if the initialized flag is set, writes made during class initialization may not yet
3494  // be visible to this thread, so a memory fence is needed before the class is used.
3495  __ dmb(ISH);
3496  __ Bind(slow_path->GetExitLabel());
3497}
3498
3499void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3500  LocationSummary* locations =
3501      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3502  locations->SetOut(Location::RequiresRegister());
3503}
3504
3505void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
3506  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
3507  codegen_->AddSlowPath(slow_path);
3508
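  // Walk current method -> declaring class -> dex cache strings -> entry; a null entry means
  // the string is unresolved and the slow path calls into the runtime.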
3509  Register out = load->GetLocations()->Out().AsRegister<Register>();
3510  codegen_->LoadCurrentMethod(out);
3511  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3512  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
3513  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
3514  __ cmp(out, ShifterOperand(0));
3515  __ b(slow_path->GetEntryLabel(), EQ);
3516  __ Bind(slow_path->GetExitLabel());
3517}
3518
3519void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3520  LocationSummary* locations =
3521      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3522  locations->SetOut(Location::RequiresRegister());
3523}
3524
3525void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3526  Register out = load->GetLocations()->Out().AsRegister<Register>();
3527  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3528  __ LoadFromOffset(kLoadWord, out, TR, offset);
3529  __ LoadImmediate(IP, 0);
3530  __ StoreToOffset(kStoreWord, IP, TR, offset);
3531}
3532
3533void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3534  LocationSummary* locations =
3535      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3536  InvokeRuntimeCallingConvention calling_convention;
3537  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3538}
3539
3540void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3541  codegen_->InvokeRuntime(
3542      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3543}
3544
3545void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3546  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3547      ? LocationSummary::kNoCall
3548      : LocationSummary::kCallOnSlowPath;
3549  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3550  locations->SetInAt(0, Location::RequiresRegister());
3551  locations->SetInAt(1, Location::RequiresRegister());
3552  locations->SetOut(Location::RequiresRegister());
3553}
3554
3555void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
3556  LocationSummary* locations = instruction->GetLocations();
3557  Register obj = locations->InAt(0).AsRegister<Register>();
3558  Register cls = locations->InAt(1).AsRegister<Register>();
3559  Register out = locations->Out().AsRegister<Register>();
3560  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3561  Label done, zero;
3562  SlowPathCodeARM* slow_path = nullptr;
3563
3564  // Return 0 if `obj` is null.
3565  // TODO: avoid this check if we know obj is not null.
3566  __ cmp(obj, ShifterOperand(0));
3567  __ b(&zero, EQ);
3568  // Compare the class of `obj` with `cls`.
3569  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
3570  __ cmp(out, ShifterOperand(cls));
3571  if (instruction->IsClassFinal()) {
3572    // Classes must be equal for the instanceof to succeed.
3573    __ b(&zero, NE);
3574    __ LoadImmediate(out, 1);
3575    __ b(&done);
3576  } else {
3577    // If the classes are not equal, we go into a slow path.
3578    DCHECK(locations->OnlyCallsOnSlowPath());
3579    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3580        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
3581    codegen_->AddSlowPath(slow_path);
3582    __ b(slow_path->GetEntryLabel(), NE);
3583    __ LoadImmediate(out, 1);
3584    __ b(&done);
3585  }
3586  __ Bind(&zero);
3587  __ LoadImmediate(out, 0);
3588  if (slow_path != nullptr) {
3589    __ Bind(slow_path->GetExitLabel());
3590  }
3591  __ Bind(&done);
3592}
3593
3594void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3595  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3596      instruction, LocationSummary::kCallOnSlowPath);
3597  locations->SetInAt(0, Location::RequiresRegister());
3598  locations->SetInAt(1, Location::RequiresRegister());
3599  locations->AddTemp(Location::RequiresRegister());
3600}
3601
3602void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
3603  LocationSummary* locations = instruction->GetLocations();
3604  Register obj = locations->InAt(0).AsRegister<Register>();
3605  Register cls = locations->InAt(1).AsRegister<Register>();
3606  Register temp = locations->GetTemp(0).AsRegister<Register>();
3607  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3608
3609  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3610      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
3611  codegen_->AddSlowPath(slow_path);
3612
3613  // TODO: avoid this check if we know obj is not null.
3614  __ cmp(obj, ShifterOperand(0));
3615  __ b(slow_path->GetExitLabel(), EQ);
3616  // Compare the class of `obj` with `cls`.
3617  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
3618  __ cmp(temp, ShifterOperand(cls));
3619  __ b(slow_path->GetEntryLabel(), NE);
3620  __ Bind(slow_path->GetExitLabel());
3621}
3622
3623void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3624  LocationSummary* locations =
3625      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3626  InvokeRuntimeCallingConvention calling_convention;
3627  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3628}
3629
3630void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3631  codegen_->InvokeRuntime(instruction->IsEnter()
3632        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3633      instruction,
3634      instruction->GetDexPc());
3635}
3636
3637void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3638void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3639void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3640
3641void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3642  LocationSummary* locations =
3643      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3644  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3645         || instruction->GetResultType() == Primitive::kPrimLong);
3646  locations->SetInAt(0, Location::RequiresRegister());
3647  locations->SetInAt(1, Location::RequiresRegister());
3648  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
3649  locations->SetOut(Location::RequiresRegister(), output_overlaps);
3650}
3651
3652void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3653  HandleBitwiseOperation(instruction);
3654}
3655
3656void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3657  HandleBitwiseOperation(instruction);
3658}
3659
3660void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3661  HandleBitwiseOperation(instruction);
3662}
3663
3664void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3665  LocationSummary* locations = instruction->GetLocations();
3666
3667  if (instruction->GetResultType() == Primitive::kPrimInt) {
3668    Register first = locations->InAt(0).AsRegister<Register>();
3669    Register second = locations->InAt(1).AsRegister<Register>();
3670    Register out = locations->Out().AsRegister<Register>();
3671    if (instruction->IsAnd()) {
3672      __ and_(out, first, ShifterOperand(second));
3673    } else if (instruction->IsOr()) {
3674      __ orr(out, first, ShifterOperand(second));
3675    } else {
3676      DCHECK(instruction->IsXor());
3677      __ eor(out, first, ShifterOperand(second));
3678    }
3679  } else {
3680    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
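    // 64-bit operations are done pairwise: the same bitwise operation is applied to the low
    // words and to the high words of the register pairs.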
3681    Location first = locations->InAt(0);
3682    Location second = locations->InAt(1);
3683    Location out = locations->Out();
3684    if (instruction->IsAnd()) {
3685      __ and_(out.AsRegisterPairLow<Register>(),
3686              first.AsRegisterPairLow<Register>(),
3687              ShifterOperand(second.AsRegisterPairLow<Register>()));
3688      __ and_(out.AsRegisterPairHigh<Register>(),
3689              first.AsRegisterPairHigh<Register>(),
3690              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3691    } else if (instruction->IsOr()) {
3692      __ orr(out.AsRegisterPairLow<Register>(),
3693             first.AsRegisterPairLow<Register>(),
3694             ShifterOperand(second.AsRegisterPairLow<Register>()));
3695      __ orr(out.AsRegisterPairHigh<Register>(),
3696             first.AsRegisterPairHigh<Register>(),
3697             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3698    } else {
3699      DCHECK(instruction->IsXor());
3700      __ eor(out.AsRegisterPairLow<Register>(),
3701             first.AsRegisterPairLow<Register>(),
3702             ShifterOperand(second.AsRegisterPairLow<Register>()));
3703      __ eor(out.AsRegisterPairHigh<Register>(),
3704             first.AsRegisterPairHigh<Register>(),
3705             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3706    }
3707  }
3708}
3709
3710}  // namespace arm
3711}  // namespace art
3712