code_generator_arm.cc revision af07bc121121d7bd7e8329c55dfe24782207b561
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/arm/assembler_arm.h"
26#include "utils/arm/managed_register_arm.h"
27#include "utils/assembler.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
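// Returns the D register that overlaps the given even-numbered S register
// (on VFP, S2n and S2n+1 alias Dn).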
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
39static constexpr bool kExplicitStackOverflowCheck = false;
40
41static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
42static constexpr int kCurrentMethodStackOffset = 0;
43
44static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
45static constexpr size_t kRuntimeParameterCoreRegistersLength =
46    arraysize(kRuntimeParameterCoreRegisters);
47static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
48static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
50class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
51 public:
52  InvokeRuntimeCallingConvention()
53      : CallingConvention(kRuntimeParameterCoreRegisters,
54                          kRuntimeParameterCoreRegistersLength,
55                          kRuntimeParameterFpuRegisters,
56                          kRuntimeParameterFpuRegistersLength) {}
57
58 private:
59  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
60};
61
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
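// Base class for ARM slow paths. It owns the entry label bound at the start of the
// slow path code and the exit label used to branch back to the fast path.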
65class SlowPathCodeARM : public SlowPathCode {
66 public:
67  SlowPathCodeARM() : entry_label_(), exit_label_() {}
68
69  Label* GetEntryLabel() { return &entry_label_; }
70  Label* GetExitLabel() { return &exit_label_; }
71
72 private:
73  Label entry_label_;
74  Label exit_label_;
75
76  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
77};
78
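// Slow path that calls the runtime to throw a NullPointerException.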
79class NullCheckSlowPathARM : public SlowPathCodeARM {
80 public:
81  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
82
83  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
84    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
85    __ Bind(GetEntryLabel());
86    arm_codegen->InvokeRuntime(
87        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
88  }
89
90 private:
91  HNullCheck* const instruction_;
92  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
93};
94
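// Slow path that calls the runtime to throw a division-by-zero exception.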
95class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
96 public:
97  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
98
99  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
101    __ Bind(GetEntryLabel());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
104  }
105
106 private:
107  HDivZeroCheck* const instruction_;
108  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
109};
110
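// Slow path that jumps to the pThrowStackOverflow runtime entry point by loading it
// directly into PC; control does not return here.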
111class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
112 public:
113  StackOverflowCheckSlowPathARM() {}
114
115  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
116    __ Bind(GetEntryLabel());
117    __ LoadFromOffset(kLoadWord, PC, TR,
118        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
119  }
120
121 private:
122  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
123};
124
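// Slow path that saves the live registers, calls pTestSuspend, restores the registers,
// and then branches either back to the suspend check or to the given successor block.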
125class SuspendCheckSlowPathARM : public SlowPathCodeARM {
126 public:
127  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
128      : instruction_(instruction), successor_(successor) {}
129
130  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
131    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
132    __ Bind(GetEntryLabel());
133    codegen->SaveLiveRegisters(instruction_->GetLocations());
134    arm_codegen->InvokeRuntime(
135        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
136    codegen->RestoreLiveRegisters(instruction_->GetLocations());
137    if (successor_ == nullptr) {
138      __ b(GetReturnLabel());
139    } else {
140      __ b(arm_codegen->GetLabelOf(successor_));
141    }
142  }
143
144  Label* GetReturnLabel() {
145    DCHECK(successor_ == nullptr);
146    return &return_label_;
147  }
148
149 private:
150  HSuspendCheck* const instruction_;
151  // If not null, the block to branch to after the suspend check.
152  HBasicBlock* const successor_;
153
154  // If `successor_` is null, the label to branch to after the suspend check.
155  Label return_label_;
156
157  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
158};
159
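// Slow path that moves the index and length into the runtime argument registers and
// calls the runtime to throw an ArrayIndexOutOfBoundsException.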
160class BoundsCheckSlowPathARM : public SlowPathCodeARM {
161 public:
162  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
163                         Location index_location,
164                         Location length_location)
165      : instruction_(instruction),
166        index_location_(index_location),
167        length_location_(length_location) {}
168
169  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
170    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
171    __ Bind(GetEntryLabel());
172    // We're moving two locations to locations that could overlap, so we need a parallel
173    // move resolver.
174    InvokeRuntimeCallingConvention calling_convention;
175    codegen->EmitParallelMoves(
176        index_location_,
177        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
178        length_location_,
179        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
180    arm_codegen->InvokeRuntime(
181        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
182  }
183
184 private:
185  HBoundsCheck* const instruction_;
186  const Location index_location_;
187  const Location length_location_;
188
189  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
190};
191
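// Slow path that calls the runtime to resolve a type (and optionally run its class
// initializer), then moves the result into the expected output location.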
192class LoadClassSlowPathARM : public SlowPathCodeARM {
193 public:
194  LoadClassSlowPathARM(HLoadClass* cls,
195                       HInstruction* at,
196                       uint32_t dex_pc,
197                       bool do_clinit)
198      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
199    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
200  }
201
202  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
203    LocationSummary* locations = at_->GetLocations();
204
205    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
206    __ Bind(GetEntryLabel());
207    codegen->SaveLiveRegisters(locations);
208
209    InvokeRuntimeCallingConvention calling_convention;
210    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
211    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
212    int32_t entry_point_offset = do_clinit_
213        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
214        : QUICK_ENTRY_POINT(pInitializeType);
215    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
216
217    // Move the class to the desired location.
218    Location out = locations->Out();
219    if (out.IsValid()) {
220      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
221      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
222    }
223    codegen->RestoreLiveRegisters(locations);
224    __ b(GetExitLabel());
225  }
226
227 private:
228  // The class this slow path will load.
229  HLoadClass* const cls_;
230
231  // The instruction at which this slow path is inserted
232  // (either the load class or an initialization check).
233  HInstruction* const at_;
234
235  // The dex PC of `at_`.
236  const uint32_t dex_pc_;
237
238  // Whether to initialize the class.
239  const bool do_clinit_;
240
241  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
242};
243
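// Slow path that calls the runtime to resolve a string and moves the result (R0) into
// the output location.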
244class LoadStringSlowPathARM : public SlowPathCodeARM {
245 public:
246  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
247
248  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
249    LocationSummary* locations = instruction_->GetLocations();
250    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
251
252    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
253    __ Bind(GetEntryLabel());
254    codegen->SaveLiveRegisters(locations);
255
256    InvokeRuntimeCallingConvention calling_convention;
257    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
258    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
259    arm_codegen->InvokeRuntime(
260        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
261    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
262
263    codegen->RestoreLiveRegisters(locations);
264    __ b(GetExitLabel());
265  }
266
267 private:
268  HLoadString* const instruction_;
269
270  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
271};
272
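// Slow path shared by instance-of and check-cast: it moves the two class arguments into
// the runtime argument registers and calls pInstanceofNonTrivial or pCheckCast.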
273class TypeCheckSlowPathARM : public SlowPathCodeARM {
274 public:
275  TypeCheckSlowPathARM(HInstruction* instruction,
276                       Location class_to_check,
277                       Location object_class,
278                       uint32_t dex_pc)
279      : instruction_(instruction),
280        class_to_check_(class_to_check),
281        object_class_(object_class),
282        dex_pc_(dex_pc) {}
283
284  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
285    LocationSummary* locations = instruction_->GetLocations();
286    DCHECK(instruction_->IsCheckCast()
287           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
288
289    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
290    __ Bind(GetEntryLabel());
291    codegen->SaveLiveRegisters(locations);
292
293    // We're moving two locations to locations that could overlap, so we need a parallel
294    // move resolver.
295    InvokeRuntimeCallingConvention calling_convention;
296    codegen->EmitParallelMoves(
297        class_to_check_,
298        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
299        object_class_,
300        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
301
302    if (instruction_->IsInstanceOf()) {
303      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
304      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
305    } else {
306      DCHECK(instruction_->IsCheckCast());
307      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
308    }
309
310    codegen->RestoreLiveRegisters(locations);
311    __ b(GetExitLabel());
312  }
313
314 private:
315  HInstruction* const instruction_;
316  const Location class_to_check_;
317  const Location object_class_;
318  uint32_t dex_pc_;
319
320  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
321};
322
323#undef __
326#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
327
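// Maps an HIR IfCondition to the corresponding ARM condition code.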
328inline Condition ARMCondition(IfCondition cond) {
329  switch (cond) {
330    case kCondEQ: return EQ;
331    case kCondNE: return NE;
332    case kCondLT: return LT;
333    case kCondLE: return LE;
334    case kCondGT: return GT;
335    case kCondGE: return GE;
336    default:
337      LOG(FATAL) << "Unknown if condition";
338  }
339  return EQ;        // Unreachable.
340}
341
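// Maps an HIR IfCondition to the ARM condition code of its negation.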
342inline Condition ARMOppositeCondition(IfCondition cond) {
343  switch (cond) {
344    case kCondEQ: return NE;
345    case kCondNE: return EQ;
346    case kCondLT: return GE;
347    case kCondLE: return GT;
348    case kCondGT: return LE;
349    case kCondGE: return LT;
350    default:
351      LOG(FATAL) << "Unknown if condition";
352  }
353  return EQ;        // Unreachable.
354}
355
356void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
357  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
358}
359
360void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
361  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
362}
363
364size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
365  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
366  return kArmWordSize;
367}
368
369size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
370  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
371  return kArmWordSize;
372}
373
374CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
375    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
376      block_labels_(graph->GetArena(), 0),
377      location_builder_(graph, this),
378      instruction_visitor_(graph, this),
379      move_resolver_(graph->GetArena(), this),
380      assembler_(true) {}
381
382size_t CodeGeneratorARM::FrameEntrySpillSize() const {
383  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
384}
385
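// Picks a free register (or register pair) of the requested kind and marks any
// overlapping registers or pairs as blocked.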
386Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
387  switch (type) {
388    case Primitive::kPrimLong: {
389      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
390      ArmManagedRegister pair =
391          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
392      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
393      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
394
395      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
396      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
397      UpdateBlockedPairRegisters();
398      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
399    }
400
401    case Primitive::kPrimByte:
402    case Primitive::kPrimBoolean:
403    case Primitive::kPrimChar:
404    case Primitive::kPrimShort:
405    case Primitive::kPrimInt:
406    case Primitive::kPrimNot: {
407      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
408      // Block all register pairs that contain `reg`.
409      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
410        ArmManagedRegister current =
411            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
412        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
413          blocked_register_pairs_[i] = true;
414        }
415      }
416      return Location::RegisterLocation(reg);
417    }
418
419    case Primitive::kPrimFloat: {
420      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
421      return Location::FpuRegisterLocation(reg);
422    }
423
424    case Primitive::kPrimDouble: {
425      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
426      DCHECK_EQ(reg % 2, 0);
427      return Location::FpuRegisterPairLocation(reg, reg + 1);
428    }
429
430    case Primitive::kPrimVoid:
431      LOG(FATAL) << "Unreachable type " << type;
432  }
433
434  return Location();
435}
436
437void CodeGeneratorARM::SetupBlockedRegisters() const {
438  // Don't allocate the register pair used for Dalvik-style argument passing.
439  blocked_register_pairs_[R1_R2] = true;
440
441  // Stack register, LR and PC are always reserved.
442  blocked_core_registers_[SP] = true;
443  blocked_core_registers_[LR] = true;
444  blocked_core_registers_[PC] = true;
445
446  // Reserve thread register.
447  blocked_core_registers_[TR] = true;
448
449  // Reserve temp register.
450  blocked_core_registers_[IP] = true;
451
452  // TODO: We currently don't use Quick's callee saved registers.
453  // We always save and restore R6 and R7 to make sure we can use three
454  // register pairs for long operations.
455  blocked_core_registers_[R4] = true;
456  blocked_core_registers_[R5] = true;
457  blocked_core_registers_[R8] = true;
458  blocked_core_registers_[R10] = true;
459  blocked_core_registers_[R11] = true;
460
461  blocked_fpu_registers_[S16] = true;
462  blocked_fpu_registers_[S17] = true;
463  blocked_fpu_registers_[S18] = true;
464  blocked_fpu_registers_[S19] = true;
465  blocked_fpu_registers_[S20] = true;
466  blocked_fpu_registers_[S21] = true;
467  blocked_fpu_registers_[S22] = true;
468  blocked_fpu_registers_[S23] = true;
469  blocked_fpu_registers_[S24] = true;
470  blocked_fpu_registers_[S25] = true;
471  blocked_fpu_registers_[S26] = true;
472  blocked_fpu_registers_[S27] = true;
473  blocked_fpu_registers_[S28] = true;
474  blocked_fpu_registers_[S29] = true;
475  blocked_fpu_registers_[S30] = true;
476  blocked_fpu_registers_[S31] = true;
477
478  UpdateBlockedPairRegisters();
479}
480
481void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
482  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
483    ArmManagedRegister current =
484        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
485    if (blocked_core_registers_[current.AsRegisterPairLow()]
486        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
487      blocked_register_pairs_[i] = true;
488    }
489  }
490}
491
492InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
493      : HGraphVisitor(graph),
494        assembler_(codegen->GetAssembler()),
495        codegen_(codegen) {}
496
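// Generates the method prologue: performs an implicit (probe) or explicit stack overflow
// check when needed, pushes LR, R6 and R7, allocates the rest of the frame, and stores the
// current method (passed in R0) at the bottom of the stack.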
497void CodeGeneratorARM::GenerateFrameEntry() {
498  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
499  if (!skip_overflow_check) {
500    if (kExplicitStackOverflowCheck) {
501      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
502      AddSlowPath(slow_path);
503
504      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
505      __ cmp(SP, ShifterOperand(IP));
506      __ b(slow_path->GetEntryLabel(), CC);
507    } else {
508      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
509      __ LoadFromOffset(kLoadWord, IP, IP, 0);
510      RecordPcInfo(nullptr, 0);
511    }
512  }
513
514  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
515  __ PushList(1 << LR | 1 << R6 | 1 << R7);
516
517  // The return PC has already been pushed on the stack.
518  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
519  __ StoreToOffset(kStoreWord, R0, SP, 0);
520}
521
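// Generates the method epilogue: deallocates the frame and pops R6, R7 and the saved
// return address directly into PC.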
522void CodeGeneratorARM::GenerateFrameExit() {
523  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
524  __ PopList(1 << PC | 1 << R6 | 1 << R7);
525}
526
527void CodeGeneratorARM::Bind(HBasicBlock* block) {
528  __ Bind(GetLabelOf(block));
529}
530
531Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
532  switch (load->GetType()) {
533    case Primitive::kPrimLong:
534    case Primitive::kPrimDouble:
535      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
536      break;
537
538    case Primitive::kPrimInt:
539    case Primitive::kPrimNot:
540    case Primitive::kPrimFloat:
541      return Location::StackSlot(GetStackSlot(load->GetLocal()));
542
543    case Primitive::kPrimBoolean:
544    case Primitive::kPrimByte:
545    case Primitive::kPrimChar:
546    case Primitive::kPrimShort:
547    case Primitive::kPrimVoid:
548      LOG(FATAL) << "Unexpected type " << load->GetType();
549  }
550
551  LOG(FATAL) << "Unreachable";
552  return Location();
553}
554
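// Computes the location of the next parameter of the given type under the dex calling
// convention: a core or FPU register (pair) when available, otherwise a stack slot; a long
// may be split between the last core register and the stack.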
555Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
556  switch (type) {
557    case Primitive::kPrimBoolean:
558    case Primitive::kPrimByte:
559    case Primitive::kPrimChar:
560    case Primitive::kPrimShort:
561    case Primitive::kPrimInt:
562    case Primitive::kPrimNot: {
563      uint32_t index = gp_index_++;
564      uint32_t stack_index = stack_index_++;
565      if (index < calling_convention.GetNumberOfRegisters()) {
566        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
567      } else {
568        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
569      }
570    }
571
572    case Primitive::kPrimLong: {
573      uint32_t index = gp_index_;
574      uint32_t stack_index = stack_index_;
575      gp_index_ += 2;
576      stack_index_ += 2;
577      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
578        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
579            calling_convention.GetRegisterPairAt(index));
580        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
581      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
582        return Location::QuickParameter(index, stack_index);
583      } else {
584        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
585      }
586    }
587
588    case Primitive::kPrimFloat: {
589      uint32_t stack_index = stack_index_++;
590      if (float_index_ % 2 == 0) {
591        float_index_ = std::max(double_index_, float_index_);
592      }
593      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
594        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
595      } else {
596        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
597      }
598    }
599
600    case Primitive::kPrimDouble: {
601      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
602      uint32_t stack_index = stack_index_;
603      stack_index_ += 2;
604      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
605        uint32_t index = double_index_;
606        double_index_ += 2;
607        return Location::FpuRegisterPairLocation(
608          calling_convention.GetFpuRegisterAt(index),
609          calling_convention.GetFpuRegisterAt(index + 1));
610      } else {
611        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
612      }
613    }
614
615    case Primitive::kPrimVoid:
616      LOG(FATAL) << "Unexpected parameter type " << type;
617      break;
618  }
619  return Location();
620}
621
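// Returns where a value of the given type is returned: R0, the R0/R1 pair, S0, or the
// S0/S1 pair; void has no location.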
622Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
623  switch (type) {
624    case Primitive::kPrimBoolean:
625    case Primitive::kPrimByte:
626    case Primitive::kPrimChar:
627    case Primitive::kPrimShort:
628    case Primitive::kPrimInt:
629    case Primitive::kPrimNot: {
630      return Location::RegisterLocation(R0);
631    }
632
633    case Primitive::kPrimFloat: {
634      return Location::FpuRegisterLocation(S0);
635    }
636
637    case Primitive::kPrimLong: {
638      return Location::RegisterPairLocation(R0, R1);
639    }
640
641    case Primitive::kPrimDouble: {
642      return Location::FpuRegisterPairLocation(S0, S1);
643    }
644
645    case Primitive::kPrimVoid:
646      return Location();
647  }
648  UNREACHABLE();
649  return Location();
650}
651
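// Moves a 32-bit value between any combination of core register, S register and stack
// slot, using IP as a scratch register for stack-to-stack moves.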
652void CodeGeneratorARM::Move32(Location destination, Location source) {
653  if (source.Equals(destination)) {
654    return;
655  }
656  if (destination.IsRegister()) {
657    if (source.IsRegister()) {
658      __ Mov(destination.As<Register>(), source.As<Register>());
659    } else if (source.IsFpuRegister()) {
660      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
661    } else {
662      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
663    }
664  } else if (destination.IsFpuRegister()) {
665    if (source.IsRegister()) {
666      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
667    } else if (source.IsFpuRegister()) {
668      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
669    } else {
670      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
671    }
672  } else {
673    DCHECK(destination.IsStackSlot());
674    if (source.IsRegister()) {
675      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
676    } else if (source.IsFpuRegister()) {
677      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
678    } else {
679      DCHECK(source.IsStackSlot());
680      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
681      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
682    }
683  }
684}
685
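// Moves a 64-bit value between register pairs, FPU register pairs, quick parameters
// (split between a register and the stack) and double stack slots; combinations that are
// not needed yet are left UNIMPLEMENTED.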
686void CodeGeneratorARM::Move64(Location destination, Location source) {
687  if (source.Equals(destination)) {
688    return;
689  }
690  if (destination.IsRegisterPair()) {
691    if (source.IsRegisterPair()) {
692      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
693      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
694    } else if (source.IsFpuRegister()) {
695      UNIMPLEMENTED(FATAL);
696    } else if (source.IsQuickParameter()) {
697      uint16_t register_index = source.GetQuickParameterRegisterIndex();
698      uint16_t stack_index = source.GetQuickParameterStackIndex();
699      InvokeDexCallingConvention calling_convention;
700      __ Mov(destination.AsRegisterPairLow<Register>(),
701             calling_convention.GetRegisterAt(register_index));
702      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
703             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
704    } else {
705      DCHECK(source.IsDoubleStackSlot());
706      if (destination.AsRegisterPairLow<Register>() == R1) {
707        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
708        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
709        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
710      } else {
711        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
712                          SP, source.GetStackIndex());
713      }
714    }
715  } else if (destination.IsFpuRegisterPair()) {
716    if (source.IsDoubleStackSlot()) {
717      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
718                         SP,
719                         source.GetStackIndex());
720    } else {
721      UNIMPLEMENTED(FATAL);
722    }
723  } else if (destination.IsQuickParameter()) {
724    InvokeDexCallingConvention calling_convention;
725    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
726    uint16_t stack_index = destination.GetQuickParameterStackIndex();
727    if (source.IsRegisterPair()) {
728      __ Mov(calling_convention.GetRegisterAt(register_index),
729             source.AsRegisterPairLow<Register>());
730      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
731             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
732    } else if (source.IsFpuRegister()) {
733      UNIMPLEMENTED(FATAL);
734    } else {
735      DCHECK(source.IsDoubleStackSlot());
736      __ LoadFromOffset(
737          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
738      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
739      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
740    }
741  } else {
742    DCHECK(destination.IsDoubleStackSlot());
743    if (source.IsRegisterPair()) {
744      if (source.AsRegisterPairLow<Register>() == R1) {
745        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
746        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
747        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
748      } else {
749        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
750                         SP, destination.GetStackIndex());
751      }
752    } else if (source.IsQuickParameter()) {
753      InvokeDexCallingConvention calling_convention;
754      uint16_t register_index = source.GetQuickParameterRegisterIndex();
755      uint16_t stack_index = source.GetQuickParameterStackIndex();
756      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
757             SP, destination.GetStackIndex());
758      __ LoadFromOffset(kLoadWord, R0,
759             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
760      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
761    } else if (source.IsFpuRegisterPair()) {
762      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
763                        SP,
764                        destination.GetStackIndex());
765    } else {
766      DCHECK(source.IsDoubleStackSlot());
767      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
768      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
769      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
770      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
771    }
772  }
773}
774
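// Materializes `instruction` into `location`: constants are loaded as immediates, locals
// are copied from their stack slots, and temporaries or already-computed values are moved
// from their current location.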
775void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
776  LocationSummary* locations = instruction->GetLocations();
777  if (locations != nullptr && locations->Out().Equals(location)) {
778    return;
779  }
780
781  if (instruction->IsIntConstant()) {
782    int32_t value = instruction->AsIntConstant()->GetValue();
783    if (location.IsRegister()) {
784      __ LoadImmediate(location.As<Register>(), value);
785    } else {
786      DCHECK(location.IsStackSlot());
787      __ LoadImmediate(IP, value);
788      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
789    }
790  } else if (instruction->IsLongConstant()) {
791    int64_t value = instruction->AsLongConstant()->GetValue();
792    if (location.IsRegisterPair()) {
793      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
794      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
795    } else {
796      DCHECK(location.IsDoubleStackSlot());
797      __ LoadImmediate(IP, Low32Bits(value));
798      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
799      __ LoadImmediate(IP, High32Bits(value));
800      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
801    }
802  } else if (instruction->IsLoadLocal()) {
803    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
804    switch (instruction->GetType()) {
805      case Primitive::kPrimBoolean:
806      case Primitive::kPrimByte:
807      case Primitive::kPrimChar:
808      case Primitive::kPrimShort:
809      case Primitive::kPrimInt:
810      case Primitive::kPrimNot:
811      case Primitive::kPrimFloat:
812        Move32(location, Location::StackSlot(stack_slot));
813        break;
814
815      case Primitive::kPrimLong:
816      case Primitive::kPrimDouble:
817        Move64(location, Location::DoubleStackSlot(stack_slot));
818        break;
819
820      default:
821        LOG(FATAL) << "Unexpected type " << instruction->GetType();
822    }
823  } else if (instruction->IsTemporary()) {
824    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
825    Move32(location, temp_location);
826  } else {
827    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
828    switch (instruction->GetType()) {
829      case Primitive::kPrimBoolean:
830      case Primitive::kPrimByte:
831      case Primitive::kPrimChar:
832      case Primitive::kPrimShort:
833      case Primitive::kPrimNot:
834      case Primitive::kPrimInt:
835      case Primitive::kPrimFloat:
836        Move32(location, locations->Out());
837        break;
838
839      case Primitive::kPrimLong:
840      case Primitive::kPrimDouble:
841        Move64(location, locations->Out());
842        break;
843
844      default:
845        LOG(FATAL) << "Unexpected type " << instruction->GetType();
846    }
847  }
848}
849
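// Calls a runtime entry point: loads its address from the thread register, branches to it
// with blx, and records the PC information for `instruction` at `dex_pc`.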
850void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
851                                     HInstruction* instruction,
852                                     uint32_t dex_pc) {
853  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
854  __ blx(LR);
855  RecordPcInfo(instruction, dex_pc);
856  DCHECK(instruction->IsSuspendCheck()
857      || instruction->IsBoundsCheck()
858      || instruction->IsNullCheck()
859      || instruction->IsDivZeroCheck()
860      || !IsLeafMethod());
861}
862
863void LocationsBuilderARM::VisitGoto(HGoto* got) {
864  got->SetLocations(nullptr);
865}
866
867void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
868  HBasicBlock* successor = got->GetSuccessor();
869  DCHECK(!successor->IsExitBlock());
870
871  HBasicBlock* block = got->GetBlock();
872  HInstruction* previous = got->GetPrevious();
873
874  HLoopInformation* info = block->GetLoopInformation();
875  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
876    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
877    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
878    return;
879  }
880
881  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
882    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
883  }
884  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
885    __ b(codegen_->GetLabelOf(successor));
886  }
887}
888
889void LocationsBuilderARM::VisitExit(HExit* exit) {
890  exit->SetLocations(nullptr);
891}
892
893void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
894  UNUSED(exit);
895  if (kIsDebugBuild) {
896    __ Comment("Unreachable");
897    __ bkpt(0);
898  }
899}
900
901void LocationsBuilderARM::VisitIf(HIf* if_instr) {
902  LocationSummary* locations =
903      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
904  HInstruction* cond = if_instr->InputAt(0);
905  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
906    locations->SetInAt(0, Location::RequiresRegister());
907  }
908}
909
910void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
911  HInstruction* cond = if_instr->InputAt(0);
912  if (cond->IsIntConstant()) {
913    // Constant condition, statically compared against 1.
914    int32_t cond_value = cond->AsIntConstant()->GetValue();
915    if (cond_value == 1) {
916      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
917                                     if_instr->IfTrueSuccessor())) {
918        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
919      }
920      return;
921    } else {
922      DCHECK_EQ(cond_value, 0);
923    }
924  } else {
925    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
926      // The condition has been materialized; compare its output against 0.
927      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
928      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
929             ShifterOperand(0));
930      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
931    } else {
932      // The condition has not been materialized; use its inputs as the comparison
933      // and its condition code as the branch condition.
934      LocationSummary* locations = cond->GetLocations();
935      if (locations->InAt(1).IsRegister()) {
936        __ cmp(locations->InAt(0).As<Register>(),
937               ShifterOperand(locations->InAt(1).As<Register>()));
938      } else {
939        DCHECK(locations->InAt(1).IsConstant());
940        int32_t value =
941            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
942        ShifterOperand operand;
943        if (ShifterOperand::CanHoldArm(value, &operand)) {
944          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
945        } else {
946          Register temp = IP;
947          __ LoadImmediate(temp, value);
948          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
949        }
950      }
951      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
952           ARMCondition(cond->AsCondition()->GetCondition()));
953    }
954  }
955  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
956                                 if_instr->IfFalseSuccessor())) {
957    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
958  }
959}
960
961
962void LocationsBuilderARM::VisitCondition(HCondition* comp) {
963  LocationSummary* locations =
964      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
965  locations->SetInAt(0, Location::RequiresRegister());
966  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
967  if (comp->NeedsMaterialization()) {
968    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
969  }
970}
971
972void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
973  if (!comp->NeedsMaterialization()) return;
974
975  LocationSummary* locations = comp->GetLocations();
976  if (locations->InAt(1).IsRegister()) {
977    __ cmp(locations->InAt(0).As<Register>(),
978           ShifterOperand(locations->InAt(1).As<Register>()));
979  } else {
980    DCHECK(locations->InAt(1).IsConstant());
981    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
982    ShifterOperand operand;
983    if (ShifterOperand::CanHoldArm(value, &operand)) {
984      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
985    } else {
986      Register temp = IP;
987      __ LoadImmediate(temp, value);
988      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
989    }
990  }
991  __ it(ARMCondition(comp->GetCondition()), kItElse);
992  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
993         ARMCondition(comp->GetCondition()));
994  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
995         ARMOppositeCondition(comp->GetCondition()));
996}
997
998void LocationsBuilderARM::VisitEqual(HEqual* comp) {
999  VisitCondition(comp);
1000}
1001
1002void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
1003  VisitCondition(comp);
1004}
1005
1006void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1007  VisitCondition(comp);
1008}
1009
1010void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1011  VisitCondition(comp);
1012}
1013
1014void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1015  VisitCondition(comp);
1016}
1017
1018void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1019  VisitCondition(comp);
1020}
1021
1022void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1023  VisitCondition(comp);
1024}
1025
1026void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1027  VisitCondition(comp);
1028}
1029
1030void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1031  VisitCondition(comp);
1032}
1033
1034void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1035  VisitCondition(comp);
1036}
1037
1038void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1039  VisitCondition(comp);
1040}
1041
1042void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1043  VisitCondition(comp);
1044}
1045
1046void LocationsBuilderARM::VisitLocal(HLocal* local) {
1047  local->SetLocations(nullptr);
1048}
1049
1050void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1051  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1052}
1053
1054void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1055  load->SetLocations(nullptr);
1056}
1057
1058void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1059  // Nothing to do, this is driven by the code generator.
1060  UNUSED(load);
1061}
1062
1063void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1064  LocationSummary* locations =
1065      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1066  switch (store->InputAt(1)->GetType()) {
1067    case Primitive::kPrimBoolean:
1068    case Primitive::kPrimByte:
1069    case Primitive::kPrimChar:
1070    case Primitive::kPrimShort:
1071    case Primitive::kPrimInt:
1072    case Primitive::kPrimNot:
1073    case Primitive::kPrimFloat:
1074      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1075      break;
1076
1077    case Primitive::kPrimLong:
1078    case Primitive::kPrimDouble:
1079      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1080      break;
1081
1082    default:
1083      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1084  }
1085}
1086
1087void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1088  UNUSED(store);
1089}
1090
1091void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1092  LocationSummary* locations =
1093      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1094  locations->SetOut(Location::ConstantLocation(constant));
1095}
1096
1097void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1098  // Will be generated at use site.
1099  UNUSED(constant);
1100}
1101
1102void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1103  LocationSummary* locations =
1104      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1105  locations->SetOut(Location::ConstantLocation(constant));
1106}
1107
1108void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1109  // Will be generated at use site.
1110  UNUSED(constant);
1111}
1112
1113void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1114  LocationSummary* locations =
1115      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1116  locations->SetOut(Location::ConstantLocation(constant));
1117}
1118
1119void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1120  // Will be generated at use site.
1121  UNUSED(constant);
1122}
1123
1124void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1125  LocationSummary* locations =
1126      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1127  locations->SetOut(Location::ConstantLocation(constant));
1128}
1129
1130void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1131  // Will be generated at use site.
1132  UNUSED(constant);
1133}
1134
1135void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1136  ret->SetLocations(nullptr);
1137}
1138
1139void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1140  UNUSED(ret);
1141  codegen_->GenerateFrameExit();
1142}
1143
1144void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1145  LocationSummary* locations =
1146      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1147  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1148}
1149
1150void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1151  UNUSED(ret);
1152  codegen_->GenerateFrameExit();
1153}
1154
1155void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1156  HandleInvoke(invoke);
1157}
1158
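// Loads the ArtMethod* of the current method from its slot at the bottom of the frame.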
1159void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1160  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1161}
1162
1163void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1164  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1165
1166  // TODO: Implement all kinds of calls:
1167  // 1) boot -> boot
1168  // 2) app -> boot
1169  // 3) app -> app
1170  //
1171  // Currently we implement the app -> app logic, which looks the method up in the dex
1172  // cache of resolved methods.
1172
1173  // temp = method;
1174  codegen_->LoadCurrentMethod(temp);
1175  // temp = temp->dex_cache_resolved_methods_;
1176  __ LoadFromOffset(
1177      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1178  // temp = temp[index_in_cache]
1179  __ LoadFromOffset(
1180      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
1181  // LR = temp[offset_of_quick_compiled_code]
1182  __ LoadFromOffset(kLoadWord, LR, temp,
1183                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1184  // LR();
1185  __ blx(LR);
1186
1187  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1188  DCHECK(!codegen_->IsLeafMethod());
1189}
1190
1191void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
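// Sets up the argument and return locations of an invoke according to the dex calling
// convention; R0 is reserved as a temporary for loading the target method.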
1192  LocationSummary* locations =
1193      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1194  locations->AddTemp(Location::RegisterLocation(R0));
1195
1196  InvokeDexCallingConventionVisitor calling_convention_visitor;
1197  for (size_t i = 0; i < invoke->InputCount(); i++) {
1198    HInstruction* input = invoke->InputAt(i);
1199    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1200  }
1201
1202  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1203}
1204
1205void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1206  HandleInvoke(invoke);
1207}
1208
1209void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1210  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1211  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1212          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1213  LocationSummary* locations = invoke->GetLocations();
1214  Location receiver = locations->InAt(0);
1215  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1216  // temp = object->GetClass();
1217  if (receiver.IsStackSlot()) {
1218    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1219    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1220  } else {
1221    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1222  }
1223  // temp = temp->GetMethodAt(method_offset);
1224  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1225  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1226  // LR = temp->GetEntryPoint();
1227  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1228  // LR();
1229  __ blx(LR);
1230  DCHECK(!codegen_->IsLeafMethod());
1231  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1232}
1233
1234void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1235  HandleInvoke(invoke);
1236  // Add the hidden argument.
1237  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1238}
1239
1240void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1241  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1242  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1243  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1244          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1245  LocationSummary* locations = invoke->GetLocations();
1246  Location receiver = locations->InAt(0);
1247  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1248
1249  // Set the hidden argument.
1250  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).As<Register>(), invoke->GetDexMethodIndex());
1251
1252  // temp = object->GetClass();
1253  if (receiver.IsStackSlot()) {
1254    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1255    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1256  } else {
1257    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1258  }
1259  // temp = temp->GetImtEntryAt(method_offset);
1260  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1261  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1262  // LR = temp->GetEntryPoint();
1263  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1264  // LR();
1265  __ blx(LR);
1266  DCHECK(!codegen_->IsLeafMethod());
1267  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1268}
1269
1270void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1271  LocationSummary* locations =
1272      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1273  switch (neg->GetResultType()) {
1274    case Primitive::kPrimInt:
1275    case Primitive::kPrimLong: {
1276      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1277      locations->SetInAt(0, Location::RequiresRegister());
1278      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1279      break;
1280    }
1281
1282    case Primitive::kPrimFloat:
1283    case Primitive::kPrimDouble:
1284      locations->SetInAt(0, Location::RequiresFpuRegister());
1285      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1286      break;
1287
1288    default:
1289      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1290  }
1291}
1292
1293void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1294  LocationSummary* locations = neg->GetLocations();
1295  Location out = locations->Out();
1296  Location in = locations->InAt(0);
1297  switch (neg->GetResultType()) {
1298    case Primitive::kPrimInt:
1299      DCHECK(in.IsRegister());
1300      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
1301      break;
1302
1303    case Primitive::kPrimLong:
1304      DCHECK(in.IsRegisterPair());
1305      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1306      __ rsbs(out.AsRegisterPairLow<Register>(),
1307              in.AsRegisterPairLow<Register>(),
1308              ShifterOperand(0));
1309      // We cannot emit an RSC (Reverse Subtract with Carry)
1310      // instruction here, as it does not exist in the Thumb-2
1311      // instruction set.  We use the following approach
1312      // using SBC and SUB instead.
1313      //
1314      // out.hi = -C
1315      __ sbc(out.AsRegisterPairHigh<Register>(),
1316             out.AsRegisterPairHigh<Register>(),
1317             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1318      // out.hi = out.hi - in.hi
1319      __ sub(out.AsRegisterPairHigh<Register>(),
1320             out.AsRegisterPairHigh<Register>(),
1321             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1322      break;
1323
1324    case Primitive::kPrimFloat:
1325      DCHECK(in.IsFpuRegister());
1326      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
1327      break;
1328
1329    case Primitive::kPrimDouble:
1330      DCHECK(in.IsFpuRegisterPair());
1331      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1332               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1333      break;
1334
1335    default:
1336      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1337  }
1338}
1339
1340void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1341  LocationSummary* locations =
1342      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
1343  Primitive::Type result_type = conversion->GetResultType();
1344  Primitive::Type input_type = conversion->GetInputType();
1345  switch (result_type) {
1346    case Primitive::kPrimInt:
1347      switch (input_type) {
1348        case Primitive::kPrimLong:
1349          // long-to-int conversion.
1350          locations->SetInAt(0, Location::Any());
1351          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1352          break;
1353
1354        case Primitive::kPrimFloat:
1355        case Primitive::kPrimDouble:
1356          LOG(FATAL) << "Type conversion from " << input_type
1357                     << " to " << result_type << " not yet implemented";
1358          break;
1359
1360        default:
1361          LOG(FATAL) << "Unexpected type conversion from " << input_type
1362                     << " to " << result_type;
1363      }
1364      break;
1365
1366    case Primitive::kPrimLong:
1367      switch (input_type) {
1368        case Primitive::kPrimByte:
1369        case Primitive::kPrimShort:
1370        case Primitive::kPrimInt:
1371        case Primitive::kPrimChar:
1372          // int-to-long conversion.
1373          locations->SetInAt(0, Location::RequiresRegister());
1374          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1375          break;
1376
1377        case Primitive::kPrimFloat:
1378        case Primitive::kPrimDouble:
1379          LOG(FATAL) << "Type conversion from " << input_type << " to "
1380                     << result_type << " not yet implemented";
1381          break;
1382
1383        default:
1384          LOG(FATAL) << "Unexpected type conversion from " << input_type
1385                     << " to " << result_type;
1386      }
1387      break;
1388
1389    case Primitive::kPrimFloat:
1390    case Primitive::kPrimDouble:
1391      LOG(FATAL) << "Type conversion from " << input_type
1392                 << " to " << result_type << " not yet implemented";
1393      break;
1394
1395    default:
1396      LOG(FATAL) << "Unexpected type conversion from " << input_type
1397                 << " to " << result_type;
1398  }
1399}
1400
1401void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1402  LocationSummary* locations = conversion->GetLocations();
1403  Location out = locations->Out();
1404  Location in = locations->InAt(0);
1405  Primitive::Type result_type = conversion->GetResultType();
1406  Primitive::Type input_type = conversion->GetInputType();
1407  switch (result_type) {
1408    case Primitive::kPrimInt:
1409      switch (input_type) {
1410        case Primitive::kPrimLong:
1411          // long-to-int conversion.
1412          DCHECK(out.IsRegister());
1413          if (in.IsRegisterPair()) {
1414            __ Mov(out.As<Register>(), in.AsRegisterPairLow<Register>());
1415          } else if (in.IsDoubleStackSlot()) {
1416            __ LoadFromOffset(kLoadWord, out.As<Register>(), SP, in.GetStackIndex());
1417          } else {
1418            DCHECK(in.IsConstant());
1419            DCHECK(in.GetConstant()->IsLongConstant());
1420            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1421            __ LoadImmediate(out.As<Register>(), static_cast<int32_t>(value));
1422          }
1423          break;
1424
1425        case Primitive::kPrimFloat:
1426        case Primitive::kPrimDouble:
1427          LOG(FATAL) << "Type conversion from " << input_type
1428                     << " to " << result_type << " not yet implemented";
1429          break;
1430
1431        default:
1432          LOG(FATAL) << "Unexpected type conversion from " << input_type
1433                     << " to " << result_type;
1434      }
1435      break;
1436
1437    case Primitive::kPrimLong:
1438      switch (input_type) {
1439        case Primitive::kPrimByte:
1440        case Primitive::kPrimShort:
1441        case Primitive::kPrimInt:
1442        case Primitive::kPrimChar:
1443          // int-to-long conversion.
1444          DCHECK(out.IsRegisterPair());
1445          DCHECK(in.IsRegister());
1446          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
1447          // Sign extension.
1448          __ Asr(out.AsRegisterPairHigh<Register>(),
1449                 out.AsRegisterPairLow<Register>(),
1450                 31);
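          // Illustration (comment added, not part of the original source): shifting the
          // low word right arithmetically by 31 replicates its sign bit into the high
          // word, e.g. int 5 becomes {lo=0x00000005, hi=0x00000000} and int -2 becomes
          // {lo=0xFFFFFFFE, hi=0xFFFFFFFF}.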
1451          break;
1452
1453        case Primitive::kPrimFloat:
1454        case Primitive::kPrimDouble:
1455          LOG(FATAL) << "Type conversion from " << input_type << " to "
1456                     << result_type << " not yet implemented";
1457          break;
1458
1459        default:
1460          LOG(FATAL) << "Unexpected type conversion from " << input_type
1461                     << " to " << result_type;
1462      }
1463      break;
1464
1465    case Primitive::kPrimFloat:
1466    case Primitive::kPrimDouble:
1467      LOG(FATAL) << "Type conversion from " << input_type
1468                 << " to " << result_type << " not yet implemented";
1469      break;
1470
1471    default:
1472      LOG(FATAL) << "Unexpected type conversion from " << input_type
1473                 << " to " << result_type;
1474  }
1475}
1476
1477void LocationsBuilderARM::VisitAdd(HAdd* add) {
1478  LocationSummary* locations =
1479      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1480  switch (add->GetResultType()) {
1481    case Primitive::kPrimInt:
1482    case Primitive::kPrimLong: {
1483      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1484      locations->SetInAt(0, Location::RequiresRegister());
1485      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1486      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1487      break;
1488    }
1489
1490    case Primitive::kPrimFloat:
1491    case Primitive::kPrimDouble: {
1492      locations->SetInAt(0, Location::RequiresFpuRegister());
1493      locations->SetInAt(1, Location::RequiresFpuRegister());
1494      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1495      break;
1496    }
1497
1498    default:
1499      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1500  }
1501}
1502
1503void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1504  LocationSummary* locations = add->GetLocations();
1505  Location out = locations->Out();
1506  Location first = locations->InAt(0);
1507  Location second = locations->InAt(1);
1508  switch (add->GetResultType()) {
1509    case Primitive::kPrimInt:
1510      if (second.IsRegister()) {
1511        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1512      } else {
1513        __ AddConstant(out.As<Register>(),
1514                       first.As<Register>(),
1515                       second.GetConstant()->AsIntConstant()->GetValue());
1516      }
1517      break;
1518
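      // Note (comment added for illustration): `adds` sums the low words and sets the
      // carry flag, and `adc` folds that carry into the sum of the high words, e.g.
      // lo 0xFFFFFFFF + 0x00000001 yields 0x00000000 with carry set, so the high
      // words receive an extra +1, giving a full 64-bit addition.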
1519    case Primitive::kPrimLong:
1520      __ adds(out.AsRegisterPairLow<Register>(),
1521              first.AsRegisterPairLow<Register>(),
1522              ShifterOperand(second.AsRegisterPairLow<Register>()));
1523      __ adc(out.AsRegisterPairHigh<Register>(),
1524             first.AsRegisterPairHigh<Register>(),
1525             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1526      break;
1527
1528    case Primitive::kPrimFloat:
1529      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1530      break;
1531
1532    case Primitive::kPrimDouble:
1533      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1534               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1535               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1536      break;
1537
1538    default:
1539      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1540  }
1541}
1542
1543void LocationsBuilderARM::VisitSub(HSub* sub) {
1544  LocationSummary* locations =
1545      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1546  switch (sub->GetResultType()) {
1547    case Primitive::kPrimInt:
1548    case Primitive::kPrimLong: {
1549      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1550      locations->SetInAt(0, Location::RequiresRegister());
1551      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1552      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1553      break;
1554    }
1555    case Primitive::kPrimFloat:
1556    case Primitive::kPrimDouble: {
1557      locations->SetInAt(0, Location::RequiresFpuRegister());
1558      locations->SetInAt(1, Location::RequiresFpuRegister());
1559      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1560      break;
1561    }
1562    default:
1563      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1564  }
1565}
1566
1567void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1568  LocationSummary* locations = sub->GetLocations();
1569  Location out = locations->Out();
1570  Location first = locations->InAt(0);
1571  Location second = locations->InAt(1);
1572  switch (sub->GetResultType()) {
1573    case Primitive::kPrimInt: {
1574      if (second.IsRegister()) {
1575        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1576      } else {
1577        __ AddConstant(out.As<Register>(),
1578                       first.As<Register>(),
1579                       -second.GetConstant()->AsIntConstant()->GetValue());
1580      }
1581      break;
1582    }
1583
1584    case Primitive::kPrimLong: {
1585      __ subs(out.AsRegisterPairLow<Register>(),
1586              first.AsRegisterPairLow<Register>(),
1587              ShifterOperand(second.AsRegisterPairLow<Register>()));
1588      __ sbc(out.AsRegisterPairHigh<Register>(),
1589             first.AsRegisterPairHigh<Register>(),
1590             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1591      break;
1592    }
1593
1594    case Primitive::kPrimFloat: {
1595      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1596      break;
1597    }
1598
1599    case Primitive::kPrimDouble: {
1600      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1601               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1602               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1603      break;
1604    }
1605
1606
1607    default:
1608      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1609  }
1610}
1611
1612void LocationsBuilderARM::VisitMul(HMul* mul) {
1613  LocationSummary* locations =
1614      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1615  switch (mul->GetResultType()) {
1616    case Primitive::kPrimInt:
1617    case Primitive::kPrimLong:  {
1618      locations->SetInAt(0, Location::RequiresRegister());
1619      locations->SetInAt(1, Location::RequiresRegister());
1620      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1621      break;
1622    }
1623
1624    case Primitive::kPrimFloat:
1625    case Primitive::kPrimDouble: {
1626      locations->SetInAt(0, Location::RequiresFpuRegister());
1627      locations->SetInAt(1, Location::RequiresFpuRegister());
1628      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1629      break;
1630    }
1631
1632    default:
1633      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1634  }
1635}
1636
1637void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1638  LocationSummary* locations = mul->GetLocations();
1639  Location out = locations->Out();
1640  Location first = locations->InAt(0);
1641  Location second = locations->InAt(1);
1642  switch (mul->GetResultType()) {
1643    case Primitive::kPrimInt: {
1644      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
1645      break;
1646    }
1647    case Primitive::kPrimLong: {
1648      Register out_hi = out.AsRegisterPairHigh<Register>();
1649      Register out_lo = out.AsRegisterPairLow<Register>();
1650      Register in1_hi = first.AsRegisterPairHigh<Register>();
1651      Register in1_lo = first.AsRegisterPairLow<Register>();
1652      Register in2_hi = second.AsRegisterPairHigh<Register>();
1653      Register in2_lo = second.AsRegisterPairLow<Register>();
1654
1655      // Extra checks are needed because of the existence of the R1_R2 pair.
1656      // The algorithm is wrong if out.hi is either in1.lo or in2.lo
1657      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
1658      DCHECK_NE(out_hi, in1_lo);
1659      DCHECK_NE(out_hi, in2_lo);
1660
1661      // input: in1 - 64 bits, in2 - 64 bits
1662      // output: out
1663      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
1664      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1665      // parts: out.lo = (in1.lo * in2.lo)[31:0]
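      // Worked example (comment added for illustration, not in the original): with
      // in1 = 0x0000_0001_0000_0002 and in2 = 0x0000_0003_0000_0004,
      //   in1.lo * in2.hi = 2 * 3 = 6,  in1.hi * in2.lo = 1 * 4 = 4,
      //   in1.lo * in2.lo = 8 (so bits [63:32] of that product are 0),
      // giving out.hi = 6 + 4 + 0 = 0xA and out.lo = 8, i.e. 0x0000_000A_0000_0008,
      // which matches the low 64 bits of the full 128-bit product.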
1666
1667      // IP <- in1.lo * in2.hi
1668      __ mul(IP, in1_lo, in2_hi);
1669      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
1670      __ mla(out_hi, in1_hi, in2_lo, IP);
1671      // out.lo <- (in1.lo * in2.lo)[31:0];
1672      __ umull(out_lo, IP, in1_lo, in2_lo);
1673      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
1674      __ add(out_hi, out_hi, ShifterOperand(IP));
1675      break;
1676    }
1677
1678    case Primitive::kPrimFloat: {
1679      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1680      break;
1681    }
1682
1683    case Primitive::kPrimDouble: {
1684      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1685               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1686               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1687      break;
1688    }
1689
1690    default:
1691      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1692  }
1693}
1694
1695void LocationsBuilderARM::VisitDiv(HDiv* div) {
1696  LocationSummary* locations =
1697      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1698  switch (div->GetResultType()) {
1699    case Primitive::kPrimInt: {
1700      locations->SetInAt(0, Location::RequiresRegister());
1701      locations->SetInAt(1, Location::RequiresRegister());
1702      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1703      break;
1704    }
1705    case Primitive::kPrimLong: {
1706      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1707      break;
1708    }
1709    case Primitive::kPrimFloat:
1710    case Primitive::kPrimDouble: {
1711      locations->SetInAt(0, Location::RequiresFpuRegister());
1712      locations->SetInAt(1, Location::RequiresFpuRegister());
1713      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1714      break;
1715    }
1716
1717    default:
1718      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1719  }
1720}
1721
1722void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1723  LocationSummary* locations = div->GetLocations();
1724  Location out = locations->Out();
1725  Location first = locations->InAt(0);
1726  Location second = locations->InAt(1);
1727
1728  switch (div->GetResultType()) {
1729    case Primitive::kPrimInt: {
1730      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1731      break;
1732    }
1733
1734    case Primitive::kPrimLong: {
1735      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1736      break;
1737    }
1738
1739    case Primitive::kPrimFloat: {
1740      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1741      break;
1742    }
1743
1744    case Primitive::kPrimDouble: {
1745      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1746               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1747               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1748      break;
1749    }
1750
1751    default:
1752      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1753  }
1754}
1755
1756void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1757  LocationSummary* locations =
1758      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1759  locations->SetInAt(0, Location::RequiresRegister());
1760  if (instruction->HasUses()) {
1761    locations->SetOut(Location::SameAsFirstInput());
1762  }
1763}
1764
1765void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1766  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1767  codegen_->AddSlowPath(slow_path);
1768
1769  LocationSummary* locations = instruction->GetLocations();
1770  Location value = locations->InAt(0);
1771
1772  DCHECK(value.IsRegister()) << value;
1773  __ cmp(value.As<Register>(), ShifterOperand(0));
1774  __ b(slow_path->GetEntryLabel(), EQ);
1775}
1776
1777void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1778  LocationSummary* locations =
1779      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1780  InvokeRuntimeCallingConvention calling_convention;
1781  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1782  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1783  locations->SetOut(Location::RegisterLocation(R0));
1784}
1785
1786void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
1787  InvokeRuntimeCallingConvention calling_convention;
1788  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1789  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1790  codegen_->InvokeRuntime(
1791      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1792}
1793
1794void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1795  LocationSummary* locations =
1796      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1797  InvokeRuntimeCallingConvention calling_convention;
1798  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1799  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1800  locations->SetOut(Location::RegisterLocation(R0));
1801  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1802}
1803
1804void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1805  InvokeRuntimeCallingConvention calling_convention;
1806  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1807  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1808  codegen_->InvokeRuntime(
1809      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1810}
1811
1812void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1813  LocationSummary* locations =
1814      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1815  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1816  if (location.IsStackSlot()) {
1817    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1818  } else if (location.IsDoubleStackSlot()) {
1819    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1820  }
1821  locations->SetOut(location);
1822}
1823
1824void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
1825  // Nothing to do, the parameter is already at its location.
1826  UNUSED(instruction);
1827}
1828
1829void LocationsBuilderARM::VisitNot(HNot* not_) {
1830  LocationSummary* locations =
1831      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1832  locations->SetInAt(0, Location::RequiresRegister());
1833  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1834}
1835
1836void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1837  LocationSummary* locations = not_->GetLocations();
1838  Location out = locations->Out();
1839  Location in = locations->InAt(0);
1840  switch (not_->InputAt(0)->GetType()) {
1841    case Primitive::kPrimBoolean:
1842      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1843      break;
1844
1845    case Primitive::kPrimInt:
1846      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1847      break;
1848
1849    case Primitive::kPrimLong:
1850      __ mvn(out.AsRegisterPairLow<Register>(),
1851             ShifterOperand(in.AsRegisterPairLow<Register>()));
1852      __ mvn(out.AsRegisterPairHigh<Register>(),
1853             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1854      break;
1855
1856    default:
1857      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1858  }
1859}
1860
1861void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1862  LocationSummary* locations =
1863      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1864  locations->SetInAt(0, Location::RequiresRegister());
1865  locations->SetInAt(1, Location::RequiresRegister());
1866  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1867}
1868
1869void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1870  LocationSummary* locations = compare->GetLocations();
1871  switch (compare->InputAt(0)->GetType()) {
1872    case Primitive::kPrimLong: {
1873      Register output = locations->Out().As<Register>();
1874      Location left = locations->InAt(0);
1875      Location right = locations->InAt(1);
1876      Label less, greater, done;
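      // Overview (comment added for illustration): the high words are compared as
      // signed values; only when they are equal does the low-word comparison decide,
      // and that one must be unsigned. E.g. for 0x0000_0000_8000_0000 vs
      // 0x0000_0000_0000_0001 the low words 0x80000000 and 0x00000001 compare by
      // magnitude, correctly yielding "greater"; a signed low-word compare would not.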
1877      __ cmp(left.AsRegisterPairHigh<Register>(),
1878             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1879      __ b(&less, LT);
1880      __ b(&greater, GT);
1881      // Do the LoadImmediate for the equal path here, before the low-word `cmp`,
1882      // as LoadImmediate might affect the status flags.
1883      __ LoadImmediate(output, 0);
1884      __ cmp(left.AsRegisterPairLow<Register>(),
1885             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
1886      __ b(&done, EQ);
1887      __ b(&less, CC);
1888
1889      __ Bind(&greater);
1890      __ LoadImmediate(output, 1);
1891      __ b(&done);
1892
1893      __ Bind(&less);
1894      __ LoadImmediate(output, -1);
1895
1896      __ Bind(&done);
1897      break;
1898    }
1899    default:
1900      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1901  }
1902}
1903
1904void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1905  LocationSummary* locations =
1906      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1907  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1908    locations->SetInAt(i, Location::Any());
1909  }
1910  locations->SetOut(Location::Any());
1911}
1912
1913void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
1914  UNUSED(instruction);
1915  LOG(FATAL) << "Unreachable";
1916}
1917
1918void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1919  LocationSummary* locations =
1920      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1921  bool needs_write_barrier =
1922      CodeGenerator::StoreNeedsWriteBarrier(instruction->GetFieldType(), instruction->GetValue());
1923  locations->SetInAt(0, Location::RequiresRegister());
1924  locations->SetInAt(1, Location::RequiresRegister());
1925  // Temporary registers for the write barrier.
1926  if (needs_write_barrier) {
1927    locations->AddTemp(Location::RequiresRegister());
1928    locations->AddTemp(Location::RequiresRegister());
1929  }
1930}
1931
1932void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1933  LocationSummary* locations = instruction->GetLocations();
1934  Register obj = locations->InAt(0).As<Register>();
1935  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1936  Primitive::Type field_type = instruction->GetFieldType();
1937
1938  switch (field_type) {
1939    case Primitive::kPrimBoolean:
1940    case Primitive::kPrimByte: {
1941      Register value = locations->InAt(1).As<Register>();
1942      __ StoreToOffset(kStoreByte, value, obj, offset);
1943      break;
1944    }
1945
1946    case Primitive::kPrimShort:
1947    case Primitive::kPrimChar: {
1948      Register value = locations->InAt(1).As<Register>();
1949      __ StoreToOffset(kStoreHalfword, value, obj, offset);
1950      break;
1951    }
1952
1953    case Primitive::kPrimInt:
1954    case Primitive::kPrimNot: {
1955      Register value = locations->InAt(1).As<Register>();
1956      __ StoreToOffset(kStoreWord, value, obj, offset);
1957      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) {
1958        Register temp = locations->GetTemp(0).As<Register>();
1959        Register card = locations->GetTemp(1).As<Register>();
1960        codegen_->MarkGCCard(temp, card, obj, value);
1961      }
1962      break;
1963    }
1964
1965    case Primitive::kPrimLong: {
1966      Location value = locations->InAt(1);
1967      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
1968      break;
1969    }
1970
1971    case Primitive::kPrimFloat: {
1972      SRegister value = locations->InAt(1).As<SRegister>();
1973      __ StoreSToOffset(value, obj, offset);
1974      break;
1975    }
1976
1977    case Primitive::kPrimDouble: {
1978      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
1979      __ StoreDToOffset(value, obj, offset);
1980      break;
1981    }
1982
1983    case Primitive::kPrimVoid:
1984      LOG(FATAL) << "Unreachable type " << field_type;
1985      UNREACHABLE();
1986  }
1987}
1988
1989void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1990  LocationSummary* locations =
1991      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1992  locations->SetInAt(0, Location::RequiresRegister());
1993  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1994}
1995
1996void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1997  LocationSummary* locations = instruction->GetLocations();
1998  Register obj = locations->InAt(0).As<Register>();
1999  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2000
2001  switch (instruction->GetType()) {
2002    case Primitive::kPrimBoolean: {
2003      Register out = locations->Out().As<Register>();
2004      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2005      break;
2006    }
2007
2008    case Primitive::kPrimByte: {
2009      Register out = locations->Out().As<Register>();
2010      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2011      break;
2012    }
2013
2014    case Primitive::kPrimShort: {
2015      Register out = locations->Out().As<Register>();
2016      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2017      break;
2018    }
2019
2020    case Primitive::kPrimChar: {
2021      Register out = locations->Out().As<Register>();
2022      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2023      break;
2024    }
2025
2026    case Primitive::kPrimInt:
2027    case Primitive::kPrimNot: {
2028      Register out = locations->Out().As<Register>();
2029      __ LoadFromOffset(kLoadWord, out, obj, offset);
2030      break;
2031    }
2032
2033    case Primitive::kPrimLong: {
2034      // TODO: support volatile.
2035      Location out = locations->Out();
2036      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2037      break;
2038    }
2039
2040    case Primitive::kPrimFloat: {
2041      SRegister out = locations->Out().As<SRegister>();
2042      __ LoadSFromOffset(out, obj, offset);
2043      break;
2044    }
2045
2046    case Primitive::kPrimDouble: {
2047      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2048      __ LoadDFromOffset(out, obj, offset);
2049      break;
2050    }
2051
2052    case Primitive::kPrimVoid:
2053      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2054      UNREACHABLE();
2055  }
2056}
2057
2058void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2059  LocationSummary* locations =
2060      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2061  locations->SetInAt(0, Location::RequiresRegister());
2062  if (instruction->HasUses()) {
2063    locations->SetOut(Location::SameAsFirstInput());
2064  }
2065}
2066
2067void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2068  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2069  codegen_->AddSlowPath(slow_path);
2070
2071  LocationSummary* locations = instruction->GetLocations();
2072  Location obj = locations->InAt(0);
2073
2074  if (obj.IsRegister()) {
2075    __ cmp(obj.As<Register>(), ShifterOperand(0));
2076    __ b(slow_path->GetEntryLabel(), EQ);
2077  } else {
2078    DCHECK(obj.IsConstant()) << obj;
2079    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2080    __ b(slow_path->GetEntryLabel());
2081  }
2082}
2083
2084void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2085  LocationSummary* locations =
2086      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2087  locations->SetInAt(0, Location::RequiresRegister());
2088  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2089  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2090}
2091
2092void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2093  LocationSummary* locations = instruction->GetLocations();
2094  Register obj = locations->InAt(0).As<Register>();
2095  Location index = locations->InAt(1);
2096
2097  switch (instruction->GetType()) {
2098    case Primitive::kPrimBoolean: {
2099      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2100      Register out = locations->Out().As<Register>();
2101      if (index.IsConstant()) {
2102        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2103        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2104      } else {
2105        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2106        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2107      }
2108      break;
2109    }
2110
2111    case Primitive::kPrimByte: {
2112      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2113      Register out = locations->Out().As<Register>();
2114      if (index.IsConstant()) {
2115        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2116        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2117      } else {
2118        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2119        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2120      }
2121      break;
2122    }
2123
2124    case Primitive::kPrimShort: {
2125      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2126      Register out = locations->Out().As<Register>();
2127      if (index.IsConstant()) {
2128        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2129        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2130      } else {
2131        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2132        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2133      }
2134      break;
2135    }
2136
2137    case Primitive::kPrimChar: {
2138      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2139      Register out = locations->Out().As<Register>();
2140      if (index.IsConstant()) {
2141        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2142        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2143      } else {
2144        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2145        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2146      }
2147      break;
2148    }
2149
2150    case Primitive::kPrimInt:
2151    case Primitive::kPrimNot: {
2152      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2153      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2154      Register out = locations->Out().As<Register>();
2155      if (index.IsConstant()) {
2156        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2157        __ LoadFromOffset(kLoadWord, out, obj, offset);
2158      } else {
2159        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2160        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2161      }
2162      break;
2163    }
2164
2165    case Primitive::kPrimLong: {
2166      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2167      Location out = locations->Out();
2168      if (index.IsConstant()) {
2169        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2170        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2171      } else {
2172        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2173        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2174      }
2175      break;
2176    }
2177
2178    case Primitive::kPrimFloat:
2179    case Primitive::kPrimDouble:
2180      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2181      UNREACHABLE();
2182    case Primitive::kPrimVoid:
2183      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2184      UNREACHABLE();
2185  }
2186}
2187
2188void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2189  Primitive::Type value_type = instruction->GetComponentType();
2190
2191  bool needs_write_barrier =
2192      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2193  bool needs_runtime_call = instruction->NeedsTypeCheck();
2194
2195  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2196      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
2197  if (needs_runtime_call) {
2198    InvokeRuntimeCallingConvention calling_convention;
2199    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2200    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2201    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2202  } else {
2203    locations->SetInAt(0, Location::RequiresRegister());
2204    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2205    locations->SetInAt(2, Location::RequiresRegister());
2206
2207    if (needs_write_barrier) {
2208      // Temporary registers for the write barrier.
2209      locations->AddTemp(Location::RequiresRegister());
2210      locations->AddTemp(Location::RequiresRegister());
2211    }
2212  }
2213}
2214
2215void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2216  LocationSummary* locations = instruction->GetLocations();
2217  Register obj = locations->InAt(0).As<Register>();
2218  Location index = locations->InAt(1);
2219  Primitive::Type value_type = instruction->GetComponentType();
2220  bool needs_runtime_call = locations->WillCall();
2221  bool needs_write_barrier =
2222      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2223
2224  switch (value_type) {
2225    case Primitive::kPrimBoolean:
2226    case Primitive::kPrimByte: {
2227      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2228      Register value = locations->InAt(2).As<Register>();
2229      if (index.IsConstant()) {
2230        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2231        __ StoreToOffset(kStoreByte, value, obj, offset);
2232      } else {
2233        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2234        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2235      }
2236      break;
2237    }
2238
2239    case Primitive::kPrimShort:
2240    case Primitive::kPrimChar: {
2241      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2242      Register value = locations->InAt(2).As<Register>();
2243      if (index.IsConstant()) {
2244        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2245        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2246      } else {
2247        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2248        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2249      }
2250      break;
2251    }
2252
2253    case Primitive::kPrimInt:
2254    case Primitive::kPrimNot: {
2255      if (!needs_runtime_call) {
2256        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2257        Register value = locations->InAt(2).As<Register>();
2258        if (index.IsConstant()) {
2259          size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2260          __ StoreToOffset(kStoreWord, value, obj, offset);
2261        } else {
2262          DCHECK(index.IsRegister()) << index;
2263          __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2264          __ StoreToOffset(kStoreWord, value, IP, data_offset);
2265        }
2266        if (needs_write_barrier) {
2267          DCHECK_EQ(value_type, Primitive::kPrimNot);
2268          Register temp = locations->GetTemp(0).As<Register>();
2269          Register card = locations->GetTemp(1).As<Register>();
2270          codegen_->MarkGCCard(temp, card, obj, value);
2271        }
2272      } else {
2273        DCHECK_EQ(value_type, Primitive::kPrimNot);
2274        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2275      }
2276      break;
2277    }
2278
2279    case Primitive::kPrimLong: {
2280      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2281      Location value = locations->InAt(2);
2282      if (index.IsConstant()) {
2283        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2284        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2285      } else {
2286        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2287        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2288      }
2289      break;
2290    }
2291
2292    case Primitive::kPrimFloat:
2293    case Primitive::kPrimDouble:
2294      LOG(FATAL) << "Unimplemented register type " << value_type;
2295      UNREACHABLE();
2296    case Primitive::kPrimVoid:
2297      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2298      UNREACHABLE();
2299  }
2300}
2301
2302void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2303  LocationSummary* locations =
2304      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2305  locations->SetInAt(0, Location::RequiresRegister());
2306  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2307}
2308
2309void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2310  LocationSummary* locations = instruction->GetLocations();
2311  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2312  Register obj = locations->InAt(0).As<Register>();
2313  Register out = locations->Out().As<Register>();
2314  __ LoadFromOffset(kLoadWord, out, obj, offset);
2315}
2316
2317void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2318  LocationSummary* locations =
2319      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2320  locations->SetInAt(0, Location::RequiresRegister());
2321  locations->SetInAt(1, Location::RequiresRegister());
2322  if (instruction->HasUses()) {
2323    locations->SetOut(Location::SameAsFirstInput());
2324  }
2325}
2326
2327void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2328  LocationSummary* locations = instruction->GetLocations();
2329  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2330      instruction, locations->InAt(0), locations->InAt(1));
2331  codegen_->AddSlowPath(slow_path);
2332
2333  Register index = locations->InAt(0).As<Register>();
2334  Register length = locations->InAt(1).As<Register>();
2335
2336  __ cmp(index, ShifterOperand(length));
2337  __ b(slow_path->GetEntryLabel(), CS);
2338}
2339
2340void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
2341  Label is_null;
2342  __ CompareAndBranchIfZero(value, &is_null);
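  // Sketch of what follows (comment added for illustration, based on the usual ART
  // card-table scheme): `card` receives the card table base from the Thread, `temp`
  // becomes object >> kCardShift, and the strb marks the card at card + temp dirty
  // by storing the low byte of `card` itself, which the biased table base makes a
  // suitable dirty value.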
2343  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
2344  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
2345  __ strb(card, Address(card, temp));
2346  __ Bind(&is_null);
2347}
2348
2349void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
2350  temp->SetLocations(nullptr);
2351}
2352
2353void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
2354  // Nothing to do, this is driven by the code generator.
2355  UNUSED(temp);
2356}
2357
2358void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
2359  UNUSED(instruction);
2360  LOG(FATAL) << "Unreachable";
2361}
2362
2363void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
2364  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2365}
2366
2367void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2368  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2369}
2370
2371void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2372  HBasicBlock* block = instruction->GetBlock();
2373  if (block->GetLoopInformation() != nullptr) {
2374    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2375    // The back edge will generate the suspend check.
2376    return;
2377  }
2378  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2379    // The goto will generate the suspend check.
2380    return;
2381  }
2382  GenerateSuspendCheck(instruction, nullptr);
2383}
2384
2385void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
2386                                                       HBasicBlock* successor) {
2387  SuspendCheckSlowPathARM* slow_path =
2388      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
2389  codegen_->AddSlowPath(slow_path);
2390
2391  __ LoadFromOffset(
2392      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
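  // Comment added for illustration: the 16-bit thread flags are zero when nothing is
  // pending; any non-zero value (e.g. a suspend or checkpoint request) sends execution
  // to the slow path, which calls into the runtime.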
2393  __ cmp(IP, ShifterOperand(0));
2394  // TODO: Figure out the branch offsets and use cbz/cbnz.
2395  if (successor == nullptr) {
2396    __ b(slow_path->GetEntryLabel(), NE);
2397    __ Bind(slow_path->GetReturnLabel());
2398  } else {
2399    __ b(codegen_->GetLabelOf(successor), EQ);
2400    __ b(slow_path->GetEntryLabel());
2401  }
2402}
2403
2404ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
2405  return codegen_->GetAssembler();
2406}
2407
2408void ParallelMoveResolverARM::EmitMove(size_t index) {
2409  MoveOperands* move = moves_.Get(index);
2410  Location source = move->GetSource();
2411  Location destination = move->GetDestination();
2412
2413  if (source.IsRegister()) {
2414    if (destination.IsRegister()) {
2415      __ Mov(destination.As<Register>(), source.As<Register>());
2416    } else {
2417      DCHECK(destination.IsStackSlot());
2418      __ StoreToOffset(kStoreWord, source.As<Register>(),
2419                       SP, destination.GetStackIndex());
2420    }
2421  } else if (source.IsStackSlot()) {
2422    if (destination.IsRegister()) {
2423      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
2424                        SP, source.GetStackIndex());
2425    } else {
2426      DCHECK(destination.IsStackSlot());
2427      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2428      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2429    }
2430  } else {
2431    DCHECK(source.IsConstant());
2432    DCHECK(source.GetConstant()->IsIntConstant());
2433    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
2434    if (destination.IsRegister()) {
2435      __ LoadImmediate(destination.As<Register>(), value);
2436    } else {
2437      DCHECK(destination.IsStackSlot());
2438      __ LoadImmediate(IP, value);
2439      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2440    }
2441  }
2442}
2443
2444void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
2445  __ Mov(IP, reg);
2446  __ LoadFromOffset(kLoadWord, reg, SP, mem);
2447  __ StoreToOffset(kStoreWord, IP, SP, mem);
2448}
2449
2450void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
2451  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
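  // Comment added for illustration: if the scope had to spill a register to obtain a
  // scratch, that spill pushed one word onto the stack, so both stack offsets are
  // adjusted by kArmWordSize before the memory-to-memory swap below.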
2452  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
2453  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
2454                    SP, mem1 + stack_offset);
2455  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
2456  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
2457                   SP, mem2 + stack_offset);
2458  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
2459}
2460
2461void ParallelMoveResolverARM::EmitSwap(size_t index) {
2462  MoveOperands* move = moves_.Get(index);
2463  Location source = move->GetSource();
2464  Location destination = move->GetDestination();
2465
2466  if (source.IsRegister() && destination.IsRegister()) {
2467    DCHECK_NE(source.As<Register>(), IP);
2468    DCHECK_NE(destination.As<Register>(), IP);
2469    __ Mov(IP, source.As<Register>());
2470    __ Mov(source.As<Register>(), destination.As<Register>());
2471    __ Mov(destination.As<Register>(), IP);
2472  } else if (source.IsRegister() && destination.IsStackSlot()) {
2473    Exchange(source.As<Register>(), destination.GetStackIndex());
2474  } else if (source.IsStackSlot() && destination.IsRegister()) {
2475    Exchange(destination.As<Register>(), source.GetStackIndex());
2476  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2477    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2478  } else {
2479    LOG(FATAL) << "Unimplemented";
2480  }
2481}
2482
2483void ParallelMoveResolverARM::SpillScratch(int reg) {
2484  __ Push(static_cast<Register>(reg));
2485}
2486
2487void ParallelMoveResolverARM::RestoreScratch(int reg) {
2488  __ Pop(static_cast<Register>(reg));
2489}
2490
2491void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2492  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2493      ? LocationSummary::kCallOnSlowPath
2494      : LocationSummary::kNoCall;
2495  LocationSummary* locations =
2496      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2497  locations->SetOut(Location::RequiresRegister());
2498}
2499
2500void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
2501  Register out = cls->GetLocations()->Out().As<Register>();
2502  if (cls->IsReferrersClass()) {
2503    DCHECK(!cls->CanCallRuntime());
2504    DCHECK(!cls->MustGenerateClinitCheck());
2505    codegen_->LoadCurrentMethod(out);
2506    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
2507  } else {
2508    DCHECK(cls->CanCallRuntime());
2509    codegen_->LoadCurrentMethod(out);
2510    __ LoadFromOffset(
2511        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
2512    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
2513
2514    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2515        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2516    codegen_->AddSlowPath(slow_path);
2517    __ cmp(out, ShifterOperand(0));
2518    __ b(slow_path->GetEntryLabel(), EQ);
2519    if (cls->MustGenerateClinitCheck()) {
2520      GenerateClassInitializationCheck(slow_path, out);
2521    } else {
2522      __ Bind(slow_path->GetExitLabel());
2523    }
2524  }
2525}
2526
2527void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2528  LocationSummary* locations =
2529      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2530  locations->SetInAt(0, Location::RequiresRegister());
2531  if (check->HasUses()) {
2532    locations->SetOut(Location::SameAsFirstInput());
2533  }
2534}
2535
2536void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2537  // We assume the class is not null.
2538  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2539      check->GetLoadClass(), check, check->GetDexPc(), true);
2540  codegen_->AddSlowPath(slow_path);
2541  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2542}
2543
2544void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
2545    SlowPathCodeARM* slow_path, Register class_reg) {
2546  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
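  // Comment added for illustration: class status values are assumed to be ordered so
  // that only a fully initialized class compares >= kStatusInitialized; every other
  // state (still initializing, or an error) falls below it and takes the slow path.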
2547  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
2548  __ b(slow_path->GetEntryLabel(), LT);
2549  // Even if the status is kStatusInitialized, the stores made by the initializing thread
2550  // may not yet be visible to this thread, so emit a memory fence before using the class.
2551  __ dmb(ISH);
2552  __ Bind(slow_path->GetExitLabel());
2553}
2554
2555void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2556  LocationSummary* locations =
2557      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2558  locations->SetInAt(0, Location::RequiresRegister());
2559  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2560}
2561
2562void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2563  LocationSummary* locations = instruction->GetLocations();
2564  Register cls = locations->InAt(0).As<Register>();
2565  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2566
2567  switch (instruction->GetType()) {
2568    case Primitive::kPrimBoolean: {
2569      Register out = locations->Out().As<Register>();
2570      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2571      break;
2572    }
2573
2574    case Primitive::kPrimByte: {
2575      Register out = locations->Out().As<Register>();
2576      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2577      break;
2578    }
2579
2580    case Primitive::kPrimShort: {
2581      Register out = locations->Out().As<Register>();
2582      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2583      break;
2584    }
2585
2586    case Primitive::kPrimChar: {
2587      Register out = locations->Out().As<Register>();
2588      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2589      break;
2590    }
2591
2592    case Primitive::kPrimInt:
2593    case Primitive::kPrimNot: {
2594      Register out = locations->Out().As<Register>();
2595      __ LoadFromOffset(kLoadWord, out, cls, offset);
2596      break;
2597    }
2598
2599    case Primitive::kPrimLong: {
2600      // TODO: support volatile.
2601      Location out = locations->Out();
2602      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2603      break;
2604    }
2605
2606    case Primitive::kPrimFloat: {
2607      SRegister out = locations->Out().As<SRegister>();
2608      __ LoadSFromOffset(out, cls, offset);
2609      break;
2610    }
2611
2612    case Primitive::kPrimDouble: {
2613      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2614      __ LoadDFromOffset(out, cls, offset);
2615      break;
2616    }
2617
2618    case Primitive::kPrimVoid:
2619      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2620      UNREACHABLE();
2621  }
2622}
2623
2624void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2625  LocationSummary* locations =
2626      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2627  bool needs_write_barrier =
2628      CodeGenerator::StoreNeedsWriteBarrier(instruction->GetFieldType(), instruction->GetValue());
2629  locations->SetInAt(0, Location::RequiresRegister());
2630  locations->SetInAt(1, Location::RequiresRegister());
2631  // Temporary registers for the write barrier.
2632  if (needs_write_barrier) {
2633    locations->AddTemp(Location::RequiresRegister());
2634    locations->AddTemp(Location::RequiresRegister());
2635  }
2636}
2637
2638void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2639  LocationSummary* locations = instruction->GetLocations();
2640  Register cls = locations->InAt(0).As<Register>();
2641  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2642  Primitive::Type field_type = instruction->GetFieldType();
2643
2644  switch (field_type) {
2645    case Primitive::kPrimBoolean:
2646    case Primitive::kPrimByte: {
2647      Register value = locations->InAt(1).As<Register>();
2648      __ StoreToOffset(kStoreByte, value, cls, offset);
2649      break;
2650    }
2651
2652    case Primitive::kPrimShort:
2653    case Primitive::kPrimChar: {
2654      Register value = locations->InAt(1).As<Register>();
2655      __ StoreToOffset(kStoreHalfword, value, cls, offset);
2656      break;
2657    }
2658
2659    case Primitive::kPrimInt:
2660    case Primitive::kPrimNot: {
2661      Register value = locations->InAt(1).As<Register>();
2662      __ StoreToOffset(kStoreWord, value, cls, offset);
2663      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) {
2664        Register temp = locations->GetTemp(0).As<Register>();
2665        Register card = locations->GetTemp(1).As<Register>();
2666        codegen_->MarkGCCard(temp, card, cls, value);
2667      }
2668      break;
2669    }
2670
2671    case Primitive::kPrimLong: {
2672      Location value = locations->InAt(1);
2673      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
2674      break;
2675    }
2676
2677    case Primitive::kPrimFloat: {
2678      SRegister value = locations->InAt(1).As<SRegister>();
2679      __ StoreSToOffset(value, cls, offset);
2680      break;
2681    }
2682
2683    case Primitive::kPrimDouble: {
2684      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
2685      __ StoreDToOffset(value, cls, offset);
2686      break;
2687    }
2688
2689    case Primitive::kPrimVoid:
2690      LOG(FATAL) << "Unreachable type " << field_type;
2691      UNREACHABLE();
2692  }
2693}
2694
2695void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2696  LocationSummary* locations =
2697      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2698  locations->SetOut(Location::RequiresRegister());
2699}
2700
2701void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
2702  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
2703  codegen_->AddSlowPath(slow_path);
2704
2705  Register out = load->GetLocations()->Out().As<Register>();
2706  codegen_->LoadCurrentMethod(out);
2707  __ LoadFromOffset(
2708      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
2709  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
2710  __ cmp(out, ShifterOperand(0));
2711  __ b(slow_path->GetEntryLabel(), EQ);
2712  __ Bind(slow_path->GetExitLabel());
2713}
2714
2715void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
2716  LocationSummary* locations =
2717      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2718  locations->SetOut(Location::RequiresRegister());
2719}
2720
2721void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
2722  Register out = load->GetLocations()->Out().As<Register>();
2723  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
2724  __ LoadFromOffset(kLoadWord, out, TR, offset);
2725  __ LoadImmediate(IP, 0);
2726  __ StoreToOffset(kStoreWord, IP, TR, offset);
2727}
2728
2729void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
2730  LocationSummary* locations =
2731      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2732  InvokeRuntimeCallingConvention calling_convention;
2733  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2734}
2735
2736void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
2737  codegen_->InvokeRuntime(
2738      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
2739}
2740
2741void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
2742  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
2743      ? LocationSummary::kNoCall
2744      : LocationSummary::kCallOnSlowPath;
2745  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2746  locations->SetInAt(0, Location::RequiresRegister());
2747  locations->SetInAt(1, Location::RequiresRegister());
2748  locations->SetOut(Location::RequiresRegister());
2749}
2750
2751void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
2752  LocationSummary* locations = instruction->GetLocations();
2753  Register obj = locations->InAt(0).As<Register>();
2754  Register cls = locations->InAt(1).As<Register>();
2755  Register out = locations->Out().As<Register>();
2756  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2757  Label done, zero;
2758  SlowPathCodeARM* slow_path = nullptr;
2759
2760  // Return 0 if `obj` is null.
2761  // TODO: avoid this check if we know obj is not null.
2762  __ cmp(obj, ShifterOperand(0));
2763  __ b(&zero, EQ);
2764  // Compare the class of `obj` with `cls`.
2765  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
2766  __ cmp(out, ShifterOperand(cls));
2767  if (instruction->IsClassFinal()) {
2768    // Classes must be equal for the instanceof to succeed.
2769    __ b(&zero, NE);
2770    __ LoadImmediate(out, 1);
2771    __ b(&done);
2772  } else {
2773    // If the classes are not equal, we go into a slow path.
2774    DCHECK(locations->OnlyCallsOnSlowPath());
2775    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
2776        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
2777    codegen_->AddSlowPath(slow_path);
2778    __ b(slow_path->GetEntryLabel(), NE);
2779    __ LoadImmediate(out, 1);
2780    __ b(&done);
2781  }
2782  __ Bind(&zero);
2783  __ LoadImmediate(out, 0);
2784  if (slow_path != nullptr) {
2785    __ Bind(slow_path->GetExitLabel());
2786  }
2787  __ Bind(&done);
2788}
2789
2790void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
2791  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2792      instruction, LocationSummary::kCallOnSlowPath);
2793  locations->SetInAt(0, Location::RequiresRegister());
2794  locations->SetInAt(1, Location::RequiresRegister());
2795  locations->AddTemp(Location::RequiresRegister());
2796}
2797
2798void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
2799  LocationSummary* locations = instruction->GetLocations();
2800  Register obj = locations->InAt(0).As<Register>();
2801  Register cls = locations->InAt(1).As<Register>();
2802  Register temp = locations->GetTemp(0).As<Register>();
2803  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2804
2805  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
2806      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
2807  codegen_->AddSlowPath(slow_path);
2808
2809  // TODO: avoid this check if we know obj is not null.
2810  __ cmp(obj, ShifterOperand(0));
2811  __ b(slow_path->GetExitLabel(), EQ);
2812  // Compare the class of `obj` with `cls`.
2813  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
2814  __ cmp(temp, ShifterOperand(cls));
2815  __ b(slow_path->GetEntryLabel(), NE);
2816  __ Bind(slow_path->GetExitLabel());
2817}
2818
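// HMonitorOperation (monitorenter/monitorexit) is lowered to a runtime call: the object
// is passed in the first core register of the runtime calling convention, and
// pLockObject / pUnlockObject perform the actual locking and unlocking.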
2819void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
2820  LocationSummary* locations =
2821      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2822  InvokeRuntimeCallingConvention calling_convention;
2823  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2824}
2825
2826void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
2827  codegen_->InvokeRuntime(
2828      instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2829      instruction,
2830      instruction->GetDexPc());
2831}
2832
2833void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
2834void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
2835void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
2836
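// And, Or and Xor share a single handler. Both inputs and the result are kept in core
// registers; a long value occupies a register pair.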
2837void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
2838  LocationSummary* locations =
2839      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2840  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
2841         || instruction->GetResultType() == Primitive::kPrimLong);
2842  locations->SetInAt(0, Location::RequiresRegister());
2843  locations->SetInAt(1, Location::RequiresRegister());
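  // For a long operation the low word of the result is written before the high words of
  // the inputs are read, so the output pair is marked as overlapping: the register
  // allocator must not reuse an input register for the output.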
2844  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
2845  locations->SetOut(Location::RequiresRegister(), output_overlaps);
2846}
2847
2848void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
2849  HandleBitwiseOperation(instruction);
2850}
2851
2852void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
2853  HandleBitwiseOperation(instruction);
2854}
2855
2856void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
2857  HandleBitwiseOperation(instruction);
2858}
2859
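// For an int operand the operation is a single data-processing instruction. For a long
// operand it is applied independently to the low and high words of the register pairs,
// so a 64-bit AND, ORR or EOR expands to two 32-bit instructions.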
2860void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
2861  LocationSummary* locations = instruction->GetLocations();
2862
2863  if (instruction->GetResultType() == Primitive::kPrimInt) {
2864    Register first = locations->InAt(0).As<Register>();
2865    Register second = locations->InAt(1).As<Register>();
2866    Register out = locations->Out().As<Register>();
2867    if (instruction->IsAnd()) {
2868      __ and_(out, first, ShifterOperand(second));
2869    } else if (instruction->IsOr()) {
2870      __ orr(out, first, ShifterOperand(second));
2871    } else {
2872      DCHECK(instruction->IsXor());
2873      __ eor(out, first, ShifterOperand(second));
2874    }
2875  } else {
2876    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
2877    Location first = locations->InAt(0);
2878    Location second = locations->InAt(1);
2879    Location out = locations->Out();
2880    if (instruction->IsAnd()) {
2881      __ and_(out.AsRegisterPairLow<Register>(),
2882              first.AsRegisterPairLow<Register>(),
2883              ShifterOperand(second.AsRegisterPairLow<Register>()));
2884      __ and_(out.AsRegisterPairHigh<Register>(),
2885              first.AsRegisterPairHigh<Register>(),
2886              ShifterOperand(second.AsRegisterPairHigh<Register>()));
2887    } else if (instruction->IsOr()) {
2888      __ orr(out.AsRegisterPairLow<Register>(),
2889             first.AsRegisterPairLow<Register>(),
2890             ShifterOperand(second.AsRegisterPairLow<Register>()));
2891      __ orr(out.AsRegisterPairHigh<Register>(),
2892             first.AsRegisterPairHigh<Register>(),
2893             ShifterOperand(second.AsRegisterPairHigh<Register>()));
2894    } else {
2895      DCHECK(instruction->IsXor());
2896      __ eor(out.AsRegisterPairLow<Register>(),
2897             first.AsRegisterPairLow<Register>(),
2898             ShifterOperand(second.AsRegisterPairLow<Register>()));
2899      __ eor(out.AsRegisterPairHigh<Register>(),
2900             first.AsRegisterPairHigh<Register>(),
2901             ShifterOperand(second.AsRegisterPairHigh<Register>()));
2902    }
2903  }
2904}
2905
2906}  // namespace arm
2907}  // namespace art
2908