code_generator_arm.cc revision 981e45424f52735b1c61ae0eac7e299ed313f8db
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/arm/assembler_arm.h"
26#include "utils/arm/managed_register_arm.h"
27#include "utils/assembler.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
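// Returns the D register that overlaps the given even-numbered S register
// (e.g. S0/S1 alias D0 in the VFP register file).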
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
39static constexpr bool kExplicitStackOverflowCheck = false;
40
41static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
42static constexpr int kCurrentMethodStackOffset = 0;
43
44static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
45static constexpr size_t kRuntimeParameterCoreRegistersLength =
46    arraysize(kRuntimeParameterCoreRegisters);
47static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
48static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
50class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
51 public:
52  InvokeRuntimeCallingConvention()
53      : CallingConvention(kRuntimeParameterCoreRegisters,
54                          kRuntimeParameterCoreRegistersLength,
55                          kRuntimeParameterFpuRegisters,
56                          kRuntimeParameterFpuRegistersLength) {}
57
58 private:
59  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
60};
61
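// Shorthand used by the slow paths below: `__` emits through the code
// generator's ArmAssembler, and QUICK_ENTRY_POINT computes the Thread offset
// of a Quick runtime entry point.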
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
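// Base class for ARM slow paths: provides the entry label the fast path
// branches to and the exit label used to jump back to the fast path.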
65class SlowPathCodeARM : public SlowPathCode {
66 public:
67  SlowPathCodeARM() : entry_label_(), exit_label_() {}
68
69  Label* GetEntryLabel() { return &entry_label_; }
70  Label* GetExitLabel() { return &exit_label_; }
71
72 private:
73  Label entry_label_;
74  Label exit_label_;
75
76  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
77};
78
79class NullCheckSlowPathARM : public SlowPathCodeARM {
80 public:
81  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
82
83  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
84    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
85    __ Bind(GetEntryLabel());
86    arm_codegen->InvokeRuntime(
87        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
88  }
89
90 private:
91  HNullCheck* const instruction_;
92  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
93};
94
95class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
96 public:
97  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
98
99  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
101    __ Bind(GetEntryLabel());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
104  }
105
106 private:
107  HDivZeroCheck* const instruction_;
108  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
109};
110
111class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
112 public:
113  StackOverflowCheckSlowPathARM() {}
114
115  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
116    __ Bind(GetEntryLabel());
117    __ LoadFromOffset(kLoadWord, PC, TR,
118        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
119  }
120
121 private:
122  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
123};
124
125class SuspendCheckSlowPathARM : public SlowPathCodeARM {
126 public:
127  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
128      : instruction_(instruction), successor_(successor) {}
129
130  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
131    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
132    __ Bind(GetEntryLabel());
133    codegen->SaveLiveRegisters(instruction_->GetLocations());
134    arm_codegen->InvokeRuntime(
135        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
136    codegen->RestoreLiveRegisters(instruction_->GetLocations());
137    if (successor_ == nullptr) {
138      __ b(GetReturnLabel());
139    } else {
140      __ b(arm_codegen->GetLabelOf(successor_));
141    }
142  }
143
144  Label* GetReturnLabel() {
145    DCHECK(successor_ == nullptr);
146    return &return_label_;
147  }
148
149 private:
150  HSuspendCheck* const instruction_;
151  // If not null, the block to branch to after the suspend check.
152  HBasicBlock* const successor_;
153
154  // If `successor_` is null, the label to branch to after the suspend check.
155  Label return_label_;
156
157  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
158};
159
160class BoundsCheckSlowPathARM : public SlowPathCodeARM {
161 public:
162  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
163                         Location index_location,
164                         Location length_location)
165      : instruction_(instruction),
166        index_location_(index_location),
167        length_location_(length_location) {}
168
169  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
170    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
171    __ Bind(GetEntryLabel());
172    // We're moving two locations to locations that could overlap, so we need a parallel
173    // move resolver.
174    InvokeRuntimeCallingConvention calling_convention;
175    codegen->EmitParallelMoves(
176        index_location_,
177        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
178        length_location_,
179        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
180    arm_codegen->InvokeRuntime(
181        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
182  }
183
184 private:
185  HBoundsCheck* const instruction_;
186  const Location index_location_;
187  const Location length_location_;
188
189  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
190};
191
192class LoadClassSlowPathARM : public SlowPathCodeARM {
193 public:
194  LoadClassSlowPathARM(HLoadClass* cls,
195                       HInstruction* at,
196                       uint32_t dex_pc,
197                       bool do_clinit)
198      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
199    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
200  }
201
202  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
203    LocationSummary* locations = at_->GetLocations();
204
205    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
206    __ Bind(GetEntryLabel());
207    codegen->SaveLiveRegisters(locations);
208
209    InvokeRuntimeCallingConvention calling_convention;
210    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
211    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
212    int32_t entry_point_offset = do_clinit_
213        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
214        : QUICK_ENTRY_POINT(pInitializeType);
215    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
216
217    // Move the class to the desired location.
218    Location out = locations->Out();
219    if (out.IsValid()) {
220      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
221      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
222    }
223    codegen->RestoreLiveRegisters(locations);
224    __ b(GetExitLabel());
225  }
226
227 private:
228  // The class this slow path will load.
229  HLoadClass* const cls_;
230
231  // The instruction at which this slow path is triggered
232  // (either the load class or an initialization check).
233  HInstruction* const at_;
234
235  // The dex PC of `at_`.
236  const uint32_t dex_pc_;
237
238  // Whether to initialize the class.
239  const bool do_clinit_;
240
241  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
242};
243
244class LoadStringSlowPathARM : public SlowPathCodeARM {
245 public:
246  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
247
248  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
249    LocationSummary* locations = instruction_->GetLocations();
250    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
251
252    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
253    __ Bind(GetEntryLabel());
254    codegen->SaveLiveRegisters(locations);
255
256    InvokeRuntimeCallingConvention calling_convention;
257    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
258    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
259    arm_codegen->InvokeRuntime(
260        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
261    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
262
263    codegen->RestoreLiveRegisters(locations);
264    __ b(GetExitLabel());
265  }
266
267 private:
268  HLoadString* const instruction_;
269
270  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
271};
272
273class TypeCheckSlowPathARM : public SlowPathCodeARM {
274 public:
275  TypeCheckSlowPathARM(HInstruction* instruction,
276                       Location class_to_check,
277                       Location object_class,
278                       uint32_t dex_pc)
279      : instruction_(instruction),
280        class_to_check_(class_to_check),
281        object_class_(object_class),
282        dex_pc_(dex_pc) {}
283
284  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
285    LocationSummary* locations = instruction_->GetLocations();
286    DCHECK(instruction_->IsCheckCast()
287           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
288
289    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
290    __ Bind(GetEntryLabel());
291    codegen->SaveLiveRegisters(locations);
292
293    // We're moving two locations to locations that could overlap, so we need a parallel
294    // move resolver.
295    InvokeRuntimeCallingConvention calling_convention;
296    codegen->EmitParallelMoves(
297        class_to_check_,
298        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
299        object_class_,
300        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
301
302    if (instruction_->IsInstanceOf()) {
303      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
304      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
305    } else {
306      DCHECK(instruction_->IsCheckCast());
307      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
308    }
309
310    codegen->RestoreLiveRegisters(locations);
311    __ b(GetExitLabel());
312  }
313
314 private:
315  HInstruction* const instruction_;
316  const Location class_to_check_;
317  const Location object_class_;
318  const uint32_t dex_pc_;
319
320  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
321};
322
323#undef __
324
326#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
327
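// Maps an HIR IfCondition to the equivalent ARM condition code (signed
// variants are used for the ordering comparisons).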
328inline Condition ARMCondition(IfCondition cond) {
329  switch (cond) {
330    case kCondEQ: return EQ;
331    case kCondNE: return NE;
332    case kCondLT: return LT;
333    case kCondLE: return LE;
334    case kCondGT: return GT;
335    case kCondGE: return GE;
336    default:
337      LOG(FATAL) << "Unknown if condition";
338  }
339  return EQ;        // Unreachable.
340}
341
342inline Condition ARMOppositeCondition(IfCondition cond) {
343  switch (cond) {
344    case kCondEQ: return NE;
345    case kCondNE: return EQ;
346    case kCondLT: return GE;
347    case kCondLE: return GT;
348    case kCondGT: return LE;
349    case kCondGE: return LT;
350    default:
351      LOG(FATAL) << "Unknown if condition";
352  }
353  return EQ;        // Unreachable.
354}
355
356void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
357  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
358}
359
360void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
361  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
362}
363
364size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
365  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
366  return kArmWordSize;
367}
368
369size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
370  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
371  return kArmWordSize;
372}
373
374CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
375    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
376      block_labels_(graph->GetArena(), 0),
377      location_builder_(graph, this),
378      instruction_visitor_(graph, this),
379      move_resolver_(graph->GetArena(), this),
380      assembler_(true) {}
381
382size_t CodeGeneratorARM::FrameEntrySpillSize() const {
383  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
384}
385
386Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
387  switch (type) {
388    case Primitive::kPrimLong: {
389      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
390      ArmManagedRegister pair =
391          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
392      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
393      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
394
395      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
396      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
397      UpdateBlockedPairRegisters();
398      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
399    }
400
401    case Primitive::kPrimByte:
402    case Primitive::kPrimBoolean:
403    case Primitive::kPrimChar:
404    case Primitive::kPrimShort:
405    case Primitive::kPrimInt:
406    case Primitive::kPrimNot: {
407      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
408      // Block all register pairs that contain `reg`.
409      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
410        ArmManagedRegister current =
411            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
412        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
413          blocked_register_pairs_[i] = true;
414        }
415      }
416      return Location::RegisterLocation(reg);
417    }
418
419    case Primitive::kPrimFloat: {
420      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
421      return Location::FpuRegisterLocation(reg);
422    }
423
424    case Primitive::kPrimDouble: {
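      // A double needs two consecutive S registers starting at an even index,
      // so that they form a single overlapping D register.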
425      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
426      DCHECK_EQ(reg % 2, 0);
427      return Location::FpuRegisterPairLocation(reg, reg + 1);
428    }
429
430    case Primitive::kPrimVoid:
431      LOG(FATAL) << "Unreachable type " << type;
432  }
433
434  return Location();
435}
436
437void CodeGeneratorARM::SetupBlockedRegisters() const {
438  // Don't allocate R1_R2: it is the Dalvik-style register pair used for argument passing.
439  blocked_register_pairs_[R1_R2] = true;
440
441  // Stack register, LR and PC are always reserved.
442  blocked_core_registers_[SP] = true;
443  blocked_core_registers_[LR] = true;
444  blocked_core_registers_[PC] = true;
445
446  // Reserve thread register.
447  blocked_core_registers_[TR] = true;
448
449  // Reserve temp register.
450  blocked_core_registers_[IP] = true;
451
452  // TODO: We currently don't use Quick's callee saved registers.
453  // We always save and restore R6 and R7 to make sure we can use three
454  // register pairs for long operations.
455  blocked_core_registers_[R4] = true;
456  blocked_core_registers_[R5] = true;
457  blocked_core_registers_[R8] = true;
458  blocked_core_registers_[R10] = true;
459  blocked_core_registers_[R11] = true;
460
461  blocked_fpu_registers_[S16] = true;
462  blocked_fpu_registers_[S17] = true;
463  blocked_fpu_registers_[S18] = true;
464  blocked_fpu_registers_[S19] = true;
465  blocked_fpu_registers_[S20] = true;
466  blocked_fpu_registers_[S21] = true;
467  blocked_fpu_registers_[S22] = true;
468  blocked_fpu_registers_[S23] = true;
469  blocked_fpu_registers_[S24] = true;
470  blocked_fpu_registers_[S25] = true;
471  blocked_fpu_registers_[S26] = true;
472  blocked_fpu_registers_[S27] = true;
473  blocked_fpu_registers_[S28] = true;
474  blocked_fpu_registers_[S29] = true;
475  blocked_fpu_registers_[S30] = true;
476  blocked_fpu_registers_[S31] = true;
477
478  UpdateBlockedPairRegisters();
479}
480
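// A register pair is blocked as soon as either of its halves is blocked.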
481void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
482  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
483    ArmManagedRegister current =
484        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
485    if (blocked_core_registers_[current.AsRegisterPairLow()]
486        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
487      blocked_register_pairs_[i] = true;
488    }
489  }
490}
491
492InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
493      : HGraphVisitor(graph),
494        assembler_(codegen->GetAssembler()),
495        codegen_(codegen) {}
496
497void CodeGeneratorARM::GenerateFrameEntry() {
498  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
499  if (!skip_overflow_check) {
500    if (kExplicitStackOverflowCheck) {
501      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
502      AddSlowPath(slow_path);
503
504      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
505      __ cmp(SP, ShifterOperand(IP));
506      __ b(slow_path->GetEntryLabel(), CC);
507    } else {
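      // Implicit check: load a word at SP minus the reserved stack size. If
      // the remaining stack is too small the access hits the protected region
      // and the fault handler turns the fault into a StackOverflowError.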
508      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
509      __ LoadFromOffset(kLoadWord, IP, IP, 0);
510      RecordPcInfo(nullptr, 0);
511    }
512  }
513
514  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
515  __ PushList(1 << LR | 1 << R6 | 1 << R7);
516
517  // LR (the return PC), R6 and R7 have already been pushed above; allocate the rest of the frame.
518  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
519  __ StoreToOffset(kStoreWord, R0, SP, 0);
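  // Frame layout at this point, from higher to lower addresses:
  //   LR, R7, R6 | spills / locals / outgoing args | current ArtMethod* <- SP.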
520}
521
522void CodeGeneratorARM::GenerateFrameExit() {
523  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
524  __ PopList(1 << PC | 1 << R6 | 1 << R7);
525}
526
527void CodeGeneratorARM::Bind(HBasicBlock* block) {
528  __ Bind(GetLabelOf(block));
529}
530
531Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
532  switch (load->GetType()) {
533    case Primitive::kPrimLong:
534    case Primitive::kPrimDouble:
535      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
537
538    case Primitive::kPrimInt:
539    case Primitive::kPrimNot:
540    case Primitive::kPrimFloat:
541      return Location::StackSlot(GetStackSlot(load->GetLocal()));
542
543    case Primitive::kPrimBoolean:
544    case Primitive::kPrimByte:
545    case Primitive::kPrimChar:
546    case Primitive::kPrimShort:
547    case Primitive::kPrimVoid:
548      LOG(FATAL) << "Unexpected type " << load->GetType();
549  }
550
551  LOG(FATAL) << "Unreachable";
552  return Location();
553}
554
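// Assigns the location of the next method argument: core registers (pairs for
// longs) and S/D registers while they last, then stack slots, following the
// ART quick calling convention on ARM.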
555Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
556  switch (type) {
557    case Primitive::kPrimBoolean:
558    case Primitive::kPrimByte:
559    case Primitive::kPrimChar:
560    case Primitive::kPrimShort:
561    case Primitive::kPrimInt:
562    case Primitive::kPrimNot: {
563      uint32_t index = gp_index_++;
564      uint32_t stack_index = stack_index_++;
565      if (index < calling_convention.GetNumberOfRegisters()) {
566        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
567      } else {
568        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
569      }
570    }
571
572    case Primitive::kPrimLong: {
573      uint32_t index = gp_index_;
574      uint32_t stack_index = stack_index_;
575      gp_index_ += 2;
576      stack_index_ += 2;
577      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
578        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
579            calling_convention.GetRegisterPairAt(index));
580        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
581      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
582        return Location::QuickParameter(index, stack_index);
583      } else {
584        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
585      }
586    }
587
588    case Primitive::kPrimFloat: {
589      uint32_t stack_index = stack_index_++;
590      if (float_index_ % 2 == 0) {
591        float_index_ = std::max(double_index_, float_index_);
592      }
593      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
594        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
595      } else {
596        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
597      }
598    }
599
600    case Primitive::kPrimDouble: {
601      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
602      uint32_t stack_index = stack_index_;
603      stack_index_ += 2;
604      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
605        uint32_t index = double_index_;
606        double_index_ += 2;
607        return Location::FpuRegisterPairLocation(
608          calling_convention.GetFpuRegisterAt(index),
609          calling_convention.GetFpuRegisterAt(index + 1));
610      } else {
611        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
612      }
613    }
614
615    case Primitive::kPrimVoid:
616      LOG(FATAL) << "Unexpected parameter type " << type;
617      break;
618  }
619  return Location();
620}
621
622Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
623  switch (type) {
624    case Primitive::kPrimBoolean:
625    case Primitive::kPrimByte:
626    case Primitive::kPrimChar:
627    case Primitive::kPrimShort:
628    case Primitive::kPrimInt:
629    case Primitive::kPrimNot: {
630      return Location::RegisterLocation(R0);
631    }
632
633    case Primitive::kPrimFloat: {
634      return Location::FpuRegisterLocation(S0);
635    }
636
637    case Primitive::kPrimLong: {
638      return Location::RegisterPairLocation(R0, R1);
639    }
640
641    case Primitive::kPrimDouble: {
642      return Location::FpuRegisterPairLocation(S0, S1);
643    }
644
645    case Primitive::kPrimVoid:
646      return Location();
647  }
648  UNREACHABLE();
649  return Location();
650}
651
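// Moves a 32-bit value between core registers, S registers and stack slots;
// IP is used as a scratch register for stack-to-stack moves.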
652void CodeGeneratorARM::Move32(Location destination, Location source) {
653  if (source.Equals(destination)) {
654    return;
655  }
656  if (destination.IsRegister()) {
657    if (source.IsRegister()) {
658      __ Mov(destination.As<Register>(), source.As<Register>());
659    } else if (source.IsFpuRegister()) {
660      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
661    } else {
662      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
663    }
664  } else if (destination.IsFpuRegister()) {
665    if (source.IsRegister()) {
666      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
667    } else if (source.IsFpuRegister()) {
668      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
669    } else {
670      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
671    }
672  } else {
673    DCHECK(destination.IsStackSlot()) << destination;
674    if (source.IsRegister()) {
675      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
676    } else if (source.IsFpuRegister()) {
677      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
678    } else {
679      DCHECK(source.IsStackSlot()) << source;
680      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
681      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
682    }
683  }
684}
685
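// Moves a 64-bit value between register pairs, FPU register pairs, Quick
// parameter slots and double stack slots; IP and R0 are used as scratch
// registers where needed.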
686void CodeGeneratorARM::Move64(Location destination, Location source) {
687  if (source.Equals(destination)) {
688    return;
689  }
690  if (destination.IsRegisterPair()) {
691    if (source.IsRegisterPair()) {
692      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
693      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
694    } else if (source.IsFpuRegister()) {
695      UNIMPLEMENTED(FATAL);
696    } else if (source.IsQuickParameter()) {
697      uint16_t register_index = source.GetQuickParameterRegisterIndex();
698      uint16_t stack_index = source.GetQuickParameterStackIndex();
699      InvokeDexCallingConvention calling_convention;
700      __ Mov(destination.AsRegisterPairLow<Register>(),
701             calling_convention.GetRegisterAt(register_index));
702      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
703             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
704    } else {
705      DCHECK(source.IsDoubleStackSlot());
706      if (destination.AsRegisterPairLow<Register>() == R1) {
707        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
708        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
709        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
710      } else {
711        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
712                          SP, source.GetStackIndex());
713      }
714    }
715  } else if (destination.IsFpuRegisterPair()) {
716    if (source.IsDoubleStackSlot()) {
717      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
718                         SP,
719                         source.GetStackIndex());
720    } else {
721      UNIMPLEMENTED(FATAL);
722    }
723  } else if (destination.IsQuickParameter()) {
724    InvokeDexCallingConvention calling_convention;
725    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
726    uint16_t stack_index = destination.GetQuickParameterStackIndex();
727    if (source.IsRegisterPair()) {
728      __ Mov(calling_convention.GetRegisterAt(register_index),
729             source.AsRegisterPairLow<Register>());
730      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
731             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
732    } else if (source.IsFpuRegister()) {
733      UNIMPLEMENTED(FATAL);
734    } else {
735      DCHECK(source.IsDoubleStackSlot());
736      __ LoadFromOffset(
737          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
738      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
739      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
740    }
741  } else {
742    DCHECK(destination.IsDoubleStackSlot());
743    if (source.IsRegisterPair()) {
744      if (source.AsRegisterPairLow<Register>() == R1) {
745        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
746        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
747        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
748      } else {
749        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
750                         SP, destination.GetStackIndex());
751      }
752    } else if (source.IsQuickParameter()) {
753      InvokeDexCallingConvention calling_convention;
754      uint16_t register_index = source.GetQuickParameterRegisterIndex();
755      uint16_t stack_index = source.GetQuickParameterStackIndex();
756      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
757             SP, destination.GetStackIndex());
758      __ LoadFromOffset(kLoadWord, R0,
759             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
760      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
761    } else if (source.IsFpuRegisterPair()) {
762      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
763                        SP,
764                        destination.GetStackIndex());
765    } else {
766      DCHECK(source.IsDoubleStackSlot());
767      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
768      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
769      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
770      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
771    }
772  }
773}
774
775void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
776  LocationSummary* locations = instruction->GetLocations();
777  if (locations != nullptr && locations->Out().Equals(location)) {
778    return;
779  }
780
781  if (locations != nullptr && locations->Out().IsConstant()) {
782    HConstant* const_to_move = locations->Out().GetConstant();
783    if (const_to_move->IsIntConstant()) {
784      int32_t value = const_to_move->AsIntConstant()->GetValue();
785      if (location.IsRegister()) {
786        __ LoadImmediate(location.As<Register>(), value);
787      } else {
788        DCHECK(location.IsStackSlot());
789        __ LoadImmediate(IP, value);
790        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
791      }
792    } else if (const_to_move->IsLongConstant()) {
793      int64_t value = const_to_move->AsLongConstant()->GetValue();
794      if (location.IsRegisterPair()) {
795        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
796        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
797      } else {
798        DCHECK(location.IsDoubleStackSlot());
799        __ LoadImmediate(IP, Low32Bits(value));
800        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
801        __ LoadImmediate(IP, High32Bits(value));
802        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
803      }
804    }
805  } else if (instruction->IsLoadLocal()) {
806    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
807    switch (instruction->GetType()) {
808      case Primitive::kPrimBoolean:
809      case Primitive::kPrimByte:
810      case Primitive::kPrimChar:
811      case Primitive::kPrimShort:
812      case Primitive::kPrimInt:
813      case Primitive::kPrimNot:
814      case Primitive::kPrimFloat:
815        Move32(location, Location::StackSlot(stack_slot));
816        break;
817
818      case Primitive::kPrimLong:
819      case Primitive::kPrimDouble:
820        Move64(location, Location::DoubleStackSlot(stack_slot));
821        break;
822
823      default:
824        LOG(FATAL) << "Unexpected type " << instruction->GetType();
825    }
826  } else if (instruction->IsTemporary()) {
827    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
828    if (temp_location.IsStackSlot()) {
829      Move32(location, temp_location);
830    } else {
831      DCHECK(temp_location.IsDoubleStackSlot());
832      Move64(location, temp_location);
833    }
834  } else {
835    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
836    switch (instruction->GetType()) {
837      case Primitive::kPrimBoolean:
838      case Primitive::kPrimByte:
839      case Primitive::kPrimChar:
840      case Primitive::kPrimShort:
841      case Primitive::kPrimNot:
842      case Primitive::kPrimInt:
843      case Primitive::kPrimFloat:
844        Move32(location, locations->Out());
845        break;
846
847      case Primitive::kPrimLong:
848      case Primitive::kPrimDouble:
849        Move64(location, locations->Out());
850        break;
851
852      default:
853        LOG(FATAL) << "Unexpected type " << instruction->GetType();
854    }
855  }
856}
857
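// Calls a Quick runtime entry point by loading its address from the Thread
// register, and records the PC so the call is covered by a stack map.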
858void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
859                                     HInstruction* instruction,
860                                     uint32_t dex_pc) {
861  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
862  __ blx(LR);
863  RecordPcInfo(instruction, dex_pc);
864  DCHECK(instruction->IsSuspendCheck()
865      || instruction->IsBoundsCheck()
866      || instruction->IsNullCheck()
867      || instruction->IsDivZeroCheck()
868      || !IsLeafMethod());
869}
870
871void LocationsBuilderARM::VisitGoto(HGoto* got) {
872  got->SetLocations(nullptr);
873}
874
875void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
876  HBasicBlock* successor = got->GetSuccessor();
877  DCHECK(!successor->IsExitBlock());
878
879  HBasicBlock* block = got->GetBlock();
880  HInstruction* previous = got->GetPrevious();
881
882  HLoopInformation* info = block->GetLoopInformation();
883  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
884    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
885    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
886    return;
887  }
888
889  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
890    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
891  }
892  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
893    __ b(codegen_->GetLabelOf(successor));
894  }
895}
896
897void LocationsBuilderARM::VisitExit(HExit* exit) {
898  exit->SetLocations(nullptr);
899}
900
901void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
902  UNUSED(exit);
903  if (kIsDebugBuild) {
904    __ Comment("Unreachable");
905    __ bkpt(0);
906  }
907}
908
909void LocationsBuilderARM::VisitIf(HIf* if_instr) {
910  LocationSummary* locations =
911      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
912  HInstruction* cond = if_instr->InputAt(0);
913  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
914    locations->SetInAt(0, Location::RequiresRegister());
915  }
916}
917
918void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
919  HInstruction* cond = if_instr->InputAt(0);
920  if (cond->IsIntConstant()) {
921    // Constant condition, statically compared against 1.
922    int32_t cond_value = cond->AsIntConstant()->GetValue();
923    if (cond_value == 1) {
924      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
925                                     if_instr->IfTrueSuccessor())) {
926        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
927      }
928      return;
929    } else {
930      DCHECK_EQ(cond_value, 0);
931    }
932  } else {
933    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
934      // The condition has been materialized; compare its output against 0.
935      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
936      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
937             ShifterOperand(0));
938      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
939    } else {
940      // Condition has not been materialized, use its inputs as the
941      // comparison and its condition as the branch condition.
942      LocationSummary* locations = cond->GetLocations();
943      if (locations->InAt(1).IsRegister()) {
944        __ cmp(locations->InAt(0).As<Register>(),
945               ShifterOperand(locations->InAt(1).As<Register>()));
946      } else {
947        DCHECK(locations->InAt(1).IsConstant());
948        int32_t value =
949            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
950        ShifterOperand operand;
951        if (ShifterOperand::CanHoldArm(value, &operand)) {
952          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
953        } else {
954          Register temp = IP;
955          __ LoadImmediate(temp, value);
956          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
957        }
958      }
959      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
960           ARMCondition(cond->AsCondition()->GetCondition()));
961    }
962  }
963  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
964                                 if_instr->IfFalseSuccessor())) {
965    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
966  }
967}
968
969
970void LocationsBuilderARM::VisitCondition(HCondition* comp) {
971  LocationSummary* locations =
972      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
973  locations->SetInAt(0, Location::RequiresRegister());
974  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
975  if (comp->NeedsMaterialization()) {
976    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
977  }
978}
979
980void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
981  if (!comp->NeedsMaterialization()) return;
982
983  LocationSummary* locations = comp->GetLocations();
984  if (locations->InAt(1).IsRegister()) {
985    __ cmp(locations->InAt(0).As<Register>(),
986           ShifterOperand(locations->InAt(1).As<Register>()));
987  } else {
988    DCHECK(locations->InAt(1).IsConstant());
989    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
990    ShifterOperand operand;
991    if (ShifterOperand::CanHoldArm(value, &operand)) {
992      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
993    } else {
994      Register temp = IP;
995      __ LoadImmediate(temp, value);
996      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
997    }
998  }
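  // Materialize the result with an IT block: 1 if the condition holds,
  // 0 otherwise.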
999  __ it(ARMCondition(comp->GetCondition()), kItElse);
1000  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
1001         ARMCondition(comp->GetCondition()));
1002  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
1003         ARMOppositeCondition(comp->GetCondition()));
1004}
1005
1006void LocationsBuilderARM::VisitEqual(HEqual* comp) {
1007  VisitCondition(comp);
1008}
1009
1010void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
1011  VisitCondition(comp);
1012}
1013
1014void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1015  VisitCondition(comp);
1016}
1017
1018void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1019  VisitCondition(comp);
1020}
1021
1022void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1023  VisitCondition(comp);
1024}
1025
1026void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1027  VisitCondition(comp);
1028}
1029
1030void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1031  VisitCondition(comp);
1032}
1033
1034void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1035  VisitCondition(comp);
1036}
1037
1038void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1039  VisitCondition(comp);
1040}
1041
1042void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1043  VisitCondition(comp);
1044}
1045
1046void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1047  VisitCondition(comp);
1048}
1049
1050void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1051  VisitCondition(comp);
1052}
1053
1054void LocationsBuilderARM::VisitLocal(HLocal* local) {
1055  local->SetLocations(nullptr);
1056}
1057
1058void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1059  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1060}
1061
1062void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1063  load->SetLocations(nullptr);
1064}
1065
1066void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1067  // Nothing to do, this is driven by the code generator.
1068  UNUSED(load);
1069}
1070
1071void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1072  LocationSummary* locations =
1073      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1074  switch (store->InputAt(1)->GetType()) {
1075    case Primitive::kPrimBoolean:
1076    case Primitive::kPrimByte:
1077    case Primitive::kPrimChar:
1078    case Primitive::kPrimShort:
1079    case Primitive::kPrimInt:
1080    case Primitive::kPrimNot:
1081    case Primitive::kPrimFloat:
1082      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1083      break;
1084
1085    case Primitive::kPrimLong:
1086    case Primitive::kPrimDouble:
1087      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1088      break;
1089
1090    default:
1091      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1092  }
1093}
1094
1095void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1096  UNUSED(store);
1097}
1098
1099void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1100  LocationSummary* locations =
1101      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1102  locations->SetOut(Location::ConstantLocation(constant));
1103}
1104
1105void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1106  // Will be generated at use site.
1107  UNUSED(constant);
1108}
1109
1110void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1111  LocationSummary* locations =
1112      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1113  locations->SetOut(Location::ConstantLocation(constant));
1114}
1115
1116void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1117  // Will be generated at use site.
1118  UNUSED(constant);
1119}
1120
1121void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1122  LocationSummary* locations =
1123      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1124  locations->SetOut(Location::ConstantLocation(constant));
1125}
1126
1127void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1128  // Will be generated at use site.
1129  UNUSED(constant);
1130}
1131
1132void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1133  LocationSummary* locations =
1134      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1135  locations->SetOut(Location::ConstantLocation(constant));
1136}
1137
1138void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1139  // Will be generated at use site.
1140  UNUSED(constant);
1141}
1142
1143void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1144  ret->SetLocations(nullptr);
1145}
1146
1147void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1148  UNUSED(ret);
1149  codegen_->GenerateFrameExit();
1150}
1151
1152void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1153  LocationSummary* locations =
1154      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1155  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1156}
1157
1158void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1159  UNUSED(ret);
1160  codegen_->GenerateFrameExit();
1161}
1162
1163void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1164  HandleInvoke(invoke);
1165}
1166
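// The current ArtMethod* was spilled at the bottom of the frame by
// GenerateFrameEntry, so it can simply be reloaded from there.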
1167void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1168  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1169}
1170
1171void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1172  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1173
1174  // TODO: Implement all kinds of calls:
1175  // 1) boot -> boot
1176  // 2) app -> boot
1177  // 3) app -> app
1178  //
1179  // Currently we implement the app -> app logic, which looks up in the resolve cache.
1180
1181  // temp = method;
1182  codegen_->LoadCurrentMethod(temp);
1183  // temp = temp->dex_cache_resolved_methods_;
1184  __ LoadFromOffset(
1185      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1186  // temp = temp[index_in_cache]
1187  __ LoadFromOffset(
1188      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
1189  // LR = temp[offset_of_quick_compiled_code]
1190  __ LoadFromOffset(kLoadWord, LR, temp,
1191                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1192  // LR()
1193  __ blx(LR);
1194
1195  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1196  DCHECK(!codegen_->IsLeafMethod());
1197}
1198
1199void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1200  LocationSummary* locations =
1201      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1202  locations->AddTemp(Location::RegisterLocation(R0));
1203
1204  InvokeDexCallingConventionVisitor calling_convention_visitor;
1205  for (size_t i = 0; i < invoke->InputCount(); i++) {
1206    HInstruction* input = invoke->InputAt(i);
1207    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1208  }
1209
1210  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1211}
1212
1213void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1214  HandleInvoke(invoke);
1215}
1216
1217void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1218  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1219  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1220          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1221  LocationSummary* locations = invoke->GetLocations();
1222  Location receiver = locations->InAt(0);
1223  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1224  // temp = object->GetClass();
1225  if (receiver.IsStackSlot()) {
1226    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1227    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1228  } else {
1229    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1230  }
1231  // temp = temp->GetMethodAt(method_offset);
1232  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1233  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1234  // LR = temp->GetEntryPoint();
1235  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1236  // LR();
1237  __ blx(LR);
1238  DCHECK(!codegen_->IsLeafMethod());
1239  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1240}
1241
1242void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1243  HandleInvoke(invoke);
1244  // Add the hidden argument.
1245  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1246}
1247
1248void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1249  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1250  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1251  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1252          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1253  LocationSummary* locations = invoke->GetLocations();
1254  Location receiver = locations->InAt(0);
1255  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1256
1257  // Set the hidden argument.
1258  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).As<Register>(), invoke->GetDexMethodIndex());
1259
1260  // temp = object->GetClass();
1261  if (receiver.IsStackSlot()) {
1262    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1263    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1264  } else {
1265    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1266  }
1267  // temp = temp->GetImtEntryAt(method_offset);
1268  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1269  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1270  // LR = temp->GetEntryPoint();
1271  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1272  // LR();
1273  __ blx(LR);
1274  DCHECK(!codegen_->IsLeafMethod());
1275  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1276}
1277
1278void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1279  LocationSummary* locations =
1280      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1281  switch (neg->GetResultType()) {
1282    case Primitive::kPrimInt:
1283    case Primitive::kPrimLong: {
1284      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1285      locations->SetInAt(0, Location::RequiresRegister());
1286      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1287      break;
1288    }
1289
1290    case Primitive::kPrimFloat:
1291    case Primitive::kPrimDouble:
1292      locations->SetInAt(0, Location::RequiresFpuRegister());
1293      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1294      break;
1295
1296    default:
1297      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1298  }
1299}
1300
1301void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1302  LocationSummary* locations = neg->GetLocations();
1303  Location out = locations->Out();
1304  Location in = locations->InAt(0);
1305  switch (neg->GetResultType()) {
1306    case Primitive::kPrimInt:
1307      DCHECK(in.IsRegister());
1308      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
1309      break;
1310
1311    case Primitive::kPrimLong:
1312      DCHECK(in.IsRegisterPair());
1313      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1314      __ rsbs(out.AsRegisterPairLow<Register>(),
1315              in.AsRegisterPairLow<Register>(),
1316              ShifterOperand(0));
1317      // We cannot emit an RSC (Reverse Subtract with Carry)
1318      // instruction here, as it does not exist in the Thumb-2
1319      // instruction set.  We use the following approach
1320      // using SBC and SUB instead.
1321      //
1322      // out.hi = -(!C): 0 if the RSBS above did not borrow, -1 otherwise.
1323      __ sbc(out.AsRegisterPairHigh<Register>(),
1324             out.AsRegisterPairHigh<Register>(),
1325             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1326      // out.hi = out.hi - in.hi
1327      __ sub(out.AsRegisterPairHigh<Register>(),
1328             out.AsRegisterPairHigh<Register>(),
1329             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1330      break;
1331
1332    case Primitive::kPrimFloat:
1333      DCHECK(in.IsFpuRegister());
1334      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
1335      break;
1336
1337    case Primitive::kPrimDouble:
1338      DCHECK(in.IsFpuRegisterPair());
1339      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1340               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1341      break;
1342
1343    default:
1344      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1345  }
1346}
1347
1348void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1349  LocationSummary* locations =
1350      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
1351  Primitive::Type result_type = conversion->GetResultType();
1352  Primitive::Type input_type = conversion->GetInputType();
1353  switch (result_type) {
1354    case Primitive::kPrimByte:
1355      switch (input_type) {
1356        case Primitive::kPrimShort:
1357        case Primitive::kPrimInt:
1358        case Primitive::kPrimChar:
1359          // Processing a Dex `int-to-byte' instruction.
1360          locations->SetInAt(0, Location::RequiresRegister());
1361          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1362          break;
1363
1364        default:
1365          LOG(FATAL) << "Unexpected type conversion from " << input_type
1366                     << " to " << result_type;
1367      }
1368      break;
1369
1370    case Primitive::kPrimInt:
1371      switch (input_type) {
1372        case Primitive::kPrimLong:
1373          // Processing a Dex `long-to-int' instruction.
1374          locations->SetInAt(0, Location::Any());
1375          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1376          break;
1377
1378        case Primitive::kPrimFloat:
1379        case Primitive::kPrimDouble:
1380          LOG(FATAL) << "Type conversion from " << input_type
1381                     << " to " << result_type << " not yet implemented";
1382          break;
1383
1384        default:
1385          LOG(FATAL) << "Unexpected type conversion from " << input_type
1386                     << " to " << result_type;
1387      }
1388      break;
1389
1390    case Primitive::kPrimLong:
1391      switch (input_type) {
1392        case Primitive::kPrimByte:
1393        case Primitive::kPrimShort:
1394        case Primitive::kPrimInt:
1395        case Primitive::kPrimChar:
1396          // Processing a Dex `int-to-long' instruction.
1397          locations->SetInAt(0, Location::RequiresRegister());
1398          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1399          break;
1400
1401        case Primitive::kPrimFloat:
1402        case Primitive::kPrimDouble:
1403          LOG(FATAL) << "Type conversion from " << input_type << " to "
1404                     << result_type << " not yet implemented";
1405          break;
1406
1407        default:
1408          LOG(FATAL) << "Unexpected type conversion from " << input_type
1409                     << " to " << result_type;
1410      }
1411      break;
1412
1413    case Primitive::kPrimChar:
1414      switch (input_type) {
1415        case Primitive::kPrimByte:
1416        case Primitive::kPrimShort:
1417        case Primitive::kPrimInt:
1418        case Primitive::kPrimChar:
1419          // Processing a Dex `int-to-char' instruction.
1420          locations->SetInAt(0, Location::RequiresRegister());
1421          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1422          break;
1423
1424        default:
1425          LOG(FATAL) << "Unexpected type conversion from " << input_type
1426                     << " to " << result_type;
1427      }
1428      break;
1429
1430    case Primitive::kPrimFloat:
1431    case Primitive::kPrimDouble:
1432      LOG(FATAL) << "Type conversion from " << input_type
1433                 << " to " << result_type << " not yet implemented";
1434      break;
1435
1436    default:
1437      LOG(FATAL) << "Unexpected type conversion from " << input_type
1438                 << " to " << result_type;
1439  }
1440}
1441
1442void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1443  LocationSummary* locations = conversion->GetLocations();
1444  Location out = locations->Out();
1445  Location in = locations->InAt(0);
1446  Primitive::Type result_type = conversion->GetResultType();
1447  Primitive::Type input_type = conversion->GetInputType();
1448  switch (result_type) {
1449    case Primitive::kPrimByte:
1450      switch (input_type) {
1451        case Primitive::kPrimShort:
1452        case Primitive::kPrimInt:
1453        case Primitive::kPrimChar:
1454          // Processing a Dex `int-to-byte' instruction.
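              // Illustration (added comment; not in the original): SBFX out, in, #0, #8 keeps
              // bits [7:0] of `in` and sign-extends bit 7, so an input of 0x000000FF becomes
              // 0xFFFFFFFF (-1), matching the semantics of a Java (byte) cast.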
1455          __ sbfx(out.As<Register>(), in.As<Register>(), 0, 8);
1456          break;
1457
1458        default:
1459          LOG(FATAL) << "Unexpected type conversion from " << input_type
1460                     << " to " << result_type;
1461      }
1462      break;
1463
1464    case Primitive::kPrimInt:
1465      switch (input_type) {
1466        case Primitive::kPrimLong:
1467          // Processing a Dex `long-to-int' instruction.
1468          DCHECK(out.IsRegister());
1469          if (in.IsRegisterPair()) {
1470            __ Mov(out.As<Register>(), in.AsRegisterPairLow<Register>());
1471          } else if (in.IsDoubleStackSlot()) {
1472            __ LoadFromOffset(kLoadWord, out.As<Register>(), SP, in.GetStackIndex());
1473          } else {
1474            DCHECK(in.IsConstant());
1475            DCHECK(in.GetConstant()->IsLongConstant());
1476            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1477            __ LoadImmediate(out.As<Register>(), static_cast<int32_t>(value));
1478          }
1479          break;
1480
1481        case Primitive::kPrimFloat:
1482        case Primitive::kPrimDouble:
1483          LOG(FATAL) << "Type conversion from " << input_type
1484                     << " to " << result_type << " not yet implemented";
1485          break;
1486
1487        default:
1488          LOG(FATAL) << "Unexpected type conversion from " << input_type
1489                     << " to " << result_type;
1490      }
1491      break;
1492
1493    case Primitive::kPrimLong:
1494      switch (input_type) {
1495        case Primitive::kPrimByte:
1496        case Primitive::kPrimShort:
1497        case Primitive::kPrimInt:
1498        case Primitive::kPrimChar:
1499          // Processing a Dex `int-to-long' instruction.
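              // Illustration (added comment; not in the original): the low word is a plain copy
              // and the high word is filled with copies of the sign bit via an arithmetic shift
              // right by 31; e.g. 0x80000000 (-2^31) widens to {lo = 0x80000000, hi = 0xFFFFFFFF}.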
1500          DCHECK(out.IsRegisterPair());
1501          DCHECK(in.IsRegister());
1502          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
1503          // Sign extension.
1504          __ Asr(out.AsRegisterPairHigh<Register>(),
1505                 out.AsRegisterPairLow<Register>(),
1506                 31);
1507          break;
1508
1509        case Primitive::kPrimFloat:
1510        case Primitive::kPrimDouble:
1511          LOG(FATAL) << "Type conversion from " << input_type << " to "
1512                     << result_type << " not yet implemented";
1513          break;
1514
1515        default:
1516          LOG(FATAL) << "Unexpected type conversion from " << input_type
1517                     << " to " << result_type;
1518      }
1519      break;
1520
1521    case Primitive::kPrimChar:
1522      switch (input_type) {
1523        case Primitive::kPrimByte:
1524        case Primitive::kPrimShort:
1525        case Primitive::kPrimInt:
1526        case Primitive::kPrimChar:
1527          // Processing a Dex `int-to-char' instruction.
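              // Illustration (added comment; not in the original): UBFX out, in, #0, #16 keeps
              // bits [15:0] and zero-extends them, so 0xFFFF8041 becomes 0x00008041, matching
              // the semantics of a Java (char) cast.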
1528          __ ubfx(out.As<Register>(), in.As<Register>(), 0, 16);
1529          break;
1530
1531        default:
1532          LOG(FATAL) << "Unexpected type conversion from " << input_type
1533                     << " to " << result_type;
1534      }
1535      break;
1536
1537    case Primitive::kPrimFloat:
1538    case Primitive::kPrimDouble:
1539      LOG(FATAL) << "Type conversion from " << input_type
1540                 << " to " << result_type << " not yet implemented";
1541      break;
1542
1543    default:
1544      LOG(FATAL) << "Unexpected type conversion from " << input_type
1545                 << " to " << result_type;
1546  }
1547}
1548
1549void LocationsBuilderARM::VisitAdd(HAdd* add) {
1550  LocationSummary* locations =
1551      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1552  switch (add->GetResultType()) {
1553    case Primitive::kPrimInt:
1554    case Primitive::kPrimLong: {
1555      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1556      locations->SetInAt(0, Location::RequiresRegister());
1557      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1558      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1559      break;
1560    }
1561
1562    case Primitive::kPrimFloat:
1563    case Primitive::kPrimDouble: {
1564      locations->SetInAt(0, Location::RequiresFpuRegister());
1565      locations->SetInAt(1, Location::RequiresFpuRegister());
1566      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1567      break;
1568    }
1569
1570    default:
1571      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1572  }
1573}
1574
1575void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1576  LocationSummary* locations = add->GetLocations();
1577  Location out = locations->Out();
1578  Location first = locations->InAt(0);
1579  Location second = locations->InAt(1);
1580  switch (add->GetResultType()) {
1581    case Primitive::kPrimInt:
1582      if (second.IsRegister()) {
1583        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1584      } else {
1585        __ AddConstant(out.As<Register>(),
1586                       first.As<Register>(),
1587                       second.GetConstant()->AsIntConstant()->GetValue());
1588      }
1589      break;
1590
1591    case Primitive::kPrimLong:
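          // Illustration (added comment; not in the original): ADDS adds the low words and sets
          // the carry flag, then ADC adds the high words plus that carry; e.g.
          // 0x00000000FFFFFFFF + 1 = 0x0000000100000000 because the low-word carry propagates
          // into the high word.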
1592      __ adds(out.AsRegisterPairLow<Register>(),
1593              first.AsRegisterPairLow<Register>(),
1594              ShifterOperand(second.AsRegisterPairLow<Register>()));
1595      __ adc(out.AsRegisterPairHigh<Register>(),
1596             first.AsRegisterPairHigh<Register>(),
1597             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1598      break;
1599
1600    case Primitive::kPrimFloat:
1601      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1602      break;
1603
1604    case Primitive::kPrimDouble:
1605      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1606               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1607               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1608      break;
1609
1610    default:
1611      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1612  }
1613}
1614
1615void LocationsBuilderARM::VisitSub(HSub* sub) {
1616  LocationSummary* locations =
1617      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1618  switch (sub->GetResultType()) {
1619    case Primitive::kPrimInt:
1620    case Primitive::kPrimLong: {
1621      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1622      locations->SetInAt(0, Location::RequiresRegister());
1623      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1624      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1625      break;
1626    }
1627    case Primitive::kPrimFloat:
1628    case Primitive::kPrimDouble: {
1629      locations->SetInAt(0, Location::RequiresFpuRegister());
1630      locations->SetInAt(1, Location::RequiresFpuRegister());
1631      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1632      break;
1633    }
1634    default:
1635      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1636  }
1637}
1638
1639void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1640  LocationSummary* locations = sub->GetLocations();
1641  Location out = locations->Out();
1642  Location first = locations->InAt(0);
1643  Location second = locations->InAt(1);
1644  switch (sub->GetResultType()) {
1645    case Primitive::kPrimInt: {
1646      if (second.IsRegister()) {
1647        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1648      } else {
1649        __ AddConstant(out.As<Register>(),
1650                       first.As<Register>(),
1651                       -second.GetConstant()->AsIntConstant()->GetValue());
1652      }
1653      break;
1654    }
1655
1656    case Primitive::kPrimLong: {
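          // Illustration (added comment; not in the original): SUBS subtracts the low words and
          // records the borrow in the carry flag (C clear means a borrow occurred); SBC then
          // subtracts the high words together with that borrow, e.g.
          // 0x0000000100000000 - 1 = 0x00000000FFFFFFFF.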
1657      __ subs(out.AsRegisterPairLow<Register>(),
1658              first.AsRegisterPairLow<Register>(),
1659              ShifterOperand(second.AsRegisterPairLow<Register>()));
1660      __ sbc(out.AsRegisterPairHigh<Register>(),
1661             first.AsRegisterPairHigh<Register>(),
1662             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1663      break;
1664    }
1665
1666    case Primitive::kPrimFloat: {
1667      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1668      break;
1669    }
1670
1671    case Primitive::kPrimDouble: {
1672      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1673               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1674               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1675      break;
1676    }
1677
1678
1679    default:
1680      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1681  }
1682}
1683
1684void LocationsBuilderARM::VisitMul(HMul* mul) {
1685  LocationSummary* locations =
1686      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1687  switch (mul->GetResultType()) {
1688    case Primitive::kPrimInt:
1689    case Primitive::kPrimLong:  {
1690      locations->SetInAt(0, Location::RequiresRegister());
1691      locations->SetInAt(1, Location::RequiresRegister());
1692      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1693      break;
1694    }
1695
1696    case Primitive::kPrimFloat:
1697    case Primitive::kPrimDouble: {
1698      locations->SetInAt(0, Location::RequiresFpuRegister());
1699      locations->SetInAt(1, Location::RequiresFpuRegister());
1700      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1701      break;
1702    }
1703
1704    default:
1705      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1706  }
1707}
1708
1709void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1710  LocationSummary* locations = mul->GetLocations();
1711  Location out = locations->Out();
1712  Location first = locations->InAt(0);
1713  Location second = locations->InAt(1);
1714  switch (mul->GetResultType()) {
1715    case Primitive::kPrimInt: {
1716      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
1717      break;
1718    }
1719    case Primitive::kPrimLong: {
1720      Register out_hi = out.AsRegisterPairHigh<Register>();
1721      Register out_lo = out.AsRegisterPairLow<Register>();
1722      Register in1_hi = first.AsRegisterPairHigh<Register>();
1723      Register in1_lo = first.AsRegisterPairLow<Register>();
1724      Register in2_hi = second.AsRegisterPairHigh<Register>();
1725      Register in2_lo = second.AsRegisterPairLow<Register>();
1726
1727      // Extra checks to protect against corner cases caused by the existence of the
1728      // R1_R2 register pair: the algorithm is wrong if out.hi aliases either in1.lo or
1729      // in2.lo (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
1730      DCHECK_NE(out_hi, in1_lo);
1731      DCHECK_NE(out_hi, in2_lo);
1732
1733      // input: in1 - 64 bits, in2 - 64 bits
1734      // output: out
1735      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
1736      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1737      // parts: out.lo = (in1.lo * in2.lo)[31:0]
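          // Worked example (added for illustration; not in the original source):
          //   in1 = 0x0000000100000002, in2 = 0x0000000300000004
          //   in1.lo * in2.lo = 8                             -> out.lo = 0x00000008, bits [63:32] = 0
          //   in1.lo * in2.hi + in1.hi * in2.lo = 6 + 4 = 10  -> out.hi = 0x0000000A
          //   result = 0x0000000A00000008, which equals (in1 * in2) mod 2^64.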
1738
1739      // IP <- in1.lo * in2.hi
1740      __ mul(IP, in1_lo, in2_hi);
1741      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
1742      __ mla(out_hi, in1_hi, in2_lo, IP);
1743      // out.lo <- (in1.lo * in2.lo)[31:0];
1744      __ umull(out_lo, IP, in1_lo, in2_lo);
1745      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
1746      __ add(out_hi, out_hi, ShifterOperand(IP));
1747      break;
1748    }
1749
1750    case Primitive::kPrimFloat: {
1751      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1752      break;
1753    }
1754
1755    case Primitive::kPrimDouble: {
1756      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1757               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1758               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1759      break;
1760    }
1761
1762    default:
1763      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1764  }
1765}
1766
1767void LocationsBuilderARM::VisitDiv(HDiv* div) {
1768  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
1769      ? LocationSummary::kCall
1770      : LocationSummary::kNoCall;
1771  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
1772
1773  switch (div->GetResultType()) {
1774    case Primitive::kPrimInt: {
1775      locations->SetInAt(0, Location::RequiresRegister());
1776      locations->SetInAt(1, Location::RequiresRegister());
1777      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1778      break;
1779    }
1780    case Primitive::kPrimLong: {
1781      InvokeRuntimeCallingConvention calling_convention;
1782      locations->SetInAt(0, Location::RegisterPairLocation(
1783          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
1784      locations->SetInAt(1, Location::RegisterPairLocation(
1785          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
1786      // The runtime helper puts the output in R0,R2.
1787      locations->SetOut(Location::RegisterPairLocation(R0, R2));
1788      break;
1789    }
1790    case Primitive::kPrimFloat:
1791    case Primitive::kPrimDouble: {
1792      locations->SetInAt(0, Location::RequiresFpuRegister());
1793      locations->SetInAt(1, Location::RequiresFpuRegister());
1794      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1795      break;
1796    }
1797
1798    default:
1799      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1800  }
1801}
1802
1803void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1804  LocationSummary* locations = div->GetLocations();
1805  Location out = locations->Out();
1806  Location first = locations->InAt(0);
1807  Location second = locations->InAt(1);
1808
1809  switch (div->GetResultType()) {
1810    case Primitive::kPrimInt: {
1811      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1812      break;
1813    }
1814
1815    case Primitive::kPrimLong: {
1816      InvokeRuntimeCallingConvention calling_convention;
1817      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
1818      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
1819      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
1820      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
1821      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
1822      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
1823
1824      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
1825      break;
1826    }
1827
1828    case Primitive::kPrimFloat: {
1829      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1830      break;
1831    }
1832
1833    case Primitive::kPrimDouble: {
1834      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1835               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1836               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1837      break;
1838    }
1839
1840    default:
1841      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1842  }
1843}
1844
1845void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1846  LocationSummary* locations =
1847      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1848  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1849  if (instruction->HasUses()) {
1850    locations->SetOut(Location::SameAsFirstInput());
1851  }
1852}
1853
1854void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1855  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1856  codegen_->AddSlowPath(slow_path);
1857
1858  LocationSummary* locations = instruction->GetLocations();
1859  Location value = locations->InAt(0);
1860
1861  switch (instruction->GetType()) {
1862    case Primitive::kPrimInt: {
1863      if (value.IsRegister()) {
1864        __ cmp(value.As<Register>(), ShifterOperand(0));
1865        __ b(slow_path->GetEntryLabel(), EQ);
1866      } else {
1867        DCHECK(value.IsConstant()) << value;
1868        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
1869          __ b(slow_path->GetEntryLabel());
1870        }
1871      }
1872      break;
1873    }
1874    case Primitive::kPrimLong: {
1875      if (value.IsRegisterPair()) {
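            // Note (added comment; not in the original): ORR-ing the two halves and setting
            // flags (ORRS) makes Z set exactly when both words are zero, i.e. when the full
            // 64-bit value is zero.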
1876        __ orrs(IP,
1877                value.AsRegisterPairLow<Register>(),
1878                ShifterOperand(value.AsRegisterPairHigh<Register>()));
1879        __ b(slow_path->GetEntryLabel(), EQ);
1880      } else {
1881        DCHECK(value.IsConstant()) << value;
1882        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
1883          __ b(slow_path->GetEntryLabel());
1884        }
1885      }
1886      break;
1887    }
1888    default:
1889      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
1890  }
1891}
1892
1893void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1894  LocationSummary* locations =
1895      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1896  InvokeRuntimeCallingConvention calling_convention;
1897  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1898  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1899  locations->SetOut(Location::RegisterLocation(R0));
1900}
1901
1902void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
1903  InvokeRuntimeCallingConvention calling_convention;
1904  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1905  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1906  codegen_->InvokeRuntime(
1907      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1908}
1909
1910void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1911  LocationSummary* locations =
1912      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1913  InvokeRuntimeCallingConvention calling_convention;
1914  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1915  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1916  locations->SetOut(Location::RegisterLocation(R0));
1917  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1918}
1919
1920void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1921  InvokeRuntimeCallingConvention calling_convention;
1922  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1923  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1924  codegen_->InvokeRuntime(
1925      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1926}
1927
1928void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1929  LocationSummary* locations =
1930      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1931  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1932  if (location.IsStackSlot()) {
1933    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1934  } else if (location.IsDoubleStackSlot()) {
1935    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1936  }
1937  locations->SetOut(location);
1938}
1939
1940void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
1941  // Nothing to do, the parameter is already at its location.
1942  UNUSED(instruction);
1943}
1944
1945void LocationsBuilderARM::VisitNot(HNot* not_) {
1946  LocationSummary* locations =
1947      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1948  locations->SetInAt(0, Location::RequiresRegister());
1949  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1950}
1951
1952void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1953  LocationSummary* locations = not_->GetLocations();
1954  Location out = locations->Out();
1955  Location in = locations->InAt(0);
1956  switch (not_->InputAt(0)->GetType()) {
1957    case Primitive::kPrimBoolean:
1958      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1959      break;
1960
1961    case Primitive::kPrimInt:
1962      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1963      break;
1964
1965    case Primitive::kPrimLong:
1966      __ mvn(out.AsRegisterPairLow<Register>(),
1967             ShifterOperand(in.AsRegisterPairLow<Register>()));
1968      __ mvn(out.AsRegisterPairHigh<Register>(),
1969             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1970      break;
1971
1972    default:
1973      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1974  }
1975}
1976
1977void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1978  LocationSummary* locations =
1979      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1980  locations->SetInAt(0, Location::RequiresRegister());
1981  locations->SetInAt(1, Location::RequiresRegister());
1982  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1983}
1984
1985void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1986  LocationSummary* locations = compare->GetLocations();
1987  switch (compare->InputAt(0)->GetType()) {
1988    case Primitive::kPrimLong: {
1989      Register output = locations->Out().As<Register>();
1990      Location left = locations->InAt(0);
1991      Location right = locations->InAt(1);
1992      Label less, greater, done;
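          // Illustration (added comment; not in the original): the high words are compared as
          // signed values and, only when they are equal, the low words are compared as unsigned
          // values. For example, left = 0xFFFFFFFF00000001 and right = 0x0000000000000002: the
          // signed high-word compare already yields "less", so the result is -1.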
1993      __ cmp(left.AsRegisterPairHigh<Register>(),
1994             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1995      __ b(&less, LT);
1996      __ b(&greater, GT);
1997      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect
1998      // the status flags.
1999      __ LoadImmediate(output, 0);
2000      __ cmp(left.AsRegisterPairLow<Register>(),
2001             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
2002      __ b(&done, EQ);
2003      __ b(&less, CC);
2004
2005      __ Bind(&greater);
2006      __ LoadImmediate(output, 1);
2007      __ b(&done);
2008
2009      __ Bind(&less);
2010      __ LoadImmediate(output, -1);
2011
2012      __ Bind(&done);
2013      break;
2014    }
2015    default:
2016      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
2017  }
2018}
2019
2020void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2021  LocationSummary* locations =
2022      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2023  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2024    locations->SetInAt(i, Location::Any());
2025  }
2026  locations->SetOut(Location::Any());
2027}
2028
2029void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
2030  UNUSED(instruction);
2031  LOG(FATAL) << "Unreachable";
2032}
2033
2034void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2035  LocationSummary* locations =
2036      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2037  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2038  locations->SetInAt(0, Location::RequiresRegister());
2039  locations->SetInAt(1, Location::RequiresRegister());
2040  // Temporary registers for the write barrier.
2041  if (is_object_type) {
2042    locations->AddTemp(Location::RequiresRegister());
2043    locations->AddTemp(Location::RequiresRegister());
2044  }
2045}
2046
2047void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2048  LocationSummary* locations = instruction->GetLocations();
2049  Register obj = locations->InAt(0).As<Register>();
2050  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2051  Primitive::Type field_type = instruction->GetFieldType();
2052
2053  switch (field_type) {
2054    case Primitive::kPrimBoolean:
2055    case Primitive::kPrimByte: {
2056      Register value = locations->InAt(1).As<Register>();
2057      __ StoreToOffset(kStoreByte, value, obj, offset);
2058      break;
2059    }
2060
2061    case Primitive::kPrimShort:
2062    case Primitive::kPrimChar: {
2063      Register value = locations->InAt(1).As<Register>();
2064      __ StoreToOffset(kStoreHalfword, value, obj, offset);
2065      break;
2066    }
2067
2068    case Primitive::kPrimInt:
2069    case Primitive::kPrimNot: {
2070      Register value = locations->InAt(1).As<Register>();
2071      __ StoreToOffset(kStoreWord, value, obj, offset);
2072      if (field_type == Primitive::kPrimNot) {
2073        Register temp = locations->GetTemp(0).As<Register>();
2074        Register card = locations->GetTemp(1).As<Register>();
2075        codegen_->MarkGCCard(temp, card, obj, value);
2076      }
2077      break;
2078    }
2079
2080    case Primitive::kPrimLong: {
2081      Location value = locations->InAt(1);
2082      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2083      break;
2084    }
2085
2086    case Primitive::kPrimFloat: {
2087      SRegister value = locations->InAt(1).As<SRegister>();
2088      __ StoreSToOffset(value, obj, offset);
2089      break;
2090    }
2091
2092    case Primitive::kPrimDouble: {
2093      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
2094      __ StoreDToOffset(value, obj, offset);
2095      break;
2096    }
2097
2098    case Primitive::kPrimVoid:
2099      LOG(FATAL) << "Unreachable type " << field_type;
2100      UNREACHABLE();
2101  }
2102}
2103
2104void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2105  LocationSummary* locations =
2106      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2107  locations->SetInAt(0, Location::RequiresRegister());
2108  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2109}
2110
2111void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2112  LocationSummary* locations = instruction->GetLocations();
2113  Register obj = locations->InAt(0).As<Register>();
2114  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2115
2116  switch (instruction->GetType()) {
2117    case Primitive::kPrimBoolean: {
2118      Register out = locations->Out().As<Register>();
2119      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2120      break;
2121    }
2122
2123    case Primitive::kPrimByte: {
2124      Register out = locations->Out().As<Register>();
2125      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2126      break;
2127    }
2128
2129    case Primitive::kPrimShort: {
2130      Register out = locations->Out().As<Register>();
2131      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2132      break;
2133    }
2134
2135    case Primitive::kPrimChar: {
2136      Register out = locations->Out().As<Register>();
2137      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2138      break;
2139    }
2140
2141    case Primitive::kPrimInt:
2142    case Primitive::kPrimNot: {
2143      Register out = locations->Out().As<Register>();
2144      __ LoadFromOffset(kLoadWord, out, obj, offset);
2145      break;
2146    }
2147
2148    case Primitive::kPrimLong: {
2149      // TODO: support volatile.
2150      Location out = locations->Out();
2151      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2152      break;
2153    }
2154
2155    case Primitive::kPrimFloat: {
2156      SRegister out = locations->Out().As<SRegister>();
2157      __ LoadSFromOffset(out, obj, offset);
2158      break;
2159    }
2160
2161    case Primitive::kPrimDouble: {
2162      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2163      __ LoadDFromOffset(out, obj, offset);
2164      break;
2165    }
2166
2167    case Primitive::kPrimVoid:
2168      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2169      UNREACHABLE();
2170  }
2171}
2172
2173void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2174  LocationSummary* locations =
2175      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2176  locations->SetInAt(0, Location::RequiresRegister());
2177  if (instruction->HasUses()) {
2178    locations->SetOut(Location::SameAsFirstInput());
2179  }
2180}
2181
2182void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2183  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2184  codegen_->AddSlowPath(slow_path);
2185
2186  LocationSummary* locations = instruction->GetLocations();
2187  Location obj = locations->InAt(0);
2188
2189  if (obj.IsRegister()) {
2190    __ cmp(obj.As<Register>(), ShifterOperand(0));
2191    __ b(slow_path->GetEntryLabel(), EQ);
2192  } else {
2193    DCHECK(obj.IsConstant()) << obj;
2194    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2195    __ b(slow_path->GetEntryLabel());
2196  }
2197}
2198
2199void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2200  LocationSummary* locations =
2201      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2202  locations->SetInAt(0, Location::RequiresRegister());
2203  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2204  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2205}
2206
2207void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2208  LocationSummary* locations = instruction->GetLocations();
2209  Register obj = locations->InAt(0).As<Register>();
2210  Location index = locations->InAt(1);
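      // Addressing note (added comment; not in the original): every case below computes the
      // element address as obj + data_offset + (index << log2(element size)). A constant index
      // is folded into the load's immediate offset; otherwise IP is set to obj plus the scaled
      // index and data_offset becomes the load's offset.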
2211
2212  switch (instruction->GetType()) {
2213    case Primitive::kPrimBoolean: {
2214      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2215      Register out = locations->Out().As<Register>();
2216      if (index.IsConstant()) {
2217        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2218        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2219      } else {
2220        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2221        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2222      }
2223      break;
2224    }
2225
2226    case Primitive::kPrimByte: {
2227      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2228      Register out = locations->Out().As<Register>();
2229      if (index.IsConstant()) {
2230        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2231        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2232      } else {
2233        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2234        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2235      }
2236      break;
2237    }
2238
2239    case Primitive::kPrimShort: {
2240      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2241      Register out = locations->Out().As<Register>();
2242      if (index.IsConstant()) {
2243        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2244        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2245      } else {
2246        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2247        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2248      }
2249      break;
2250    }
2251
2252    case Primitive::kPrimChar: {
2253      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2254      Register out = locations->Out().As<Register>();
2255      if (index.IsConstant()) {
2256        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2257        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2258      } else {
2259        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2260        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2261      }
2262      break;
2263    }
2264
2265    case Primitive::kPrimInt:
2266    case Primitive::kPrimNot: {
2267      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2268      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2269      Register out = locations->Out().As<Register>();
2270      if (index.IsConstant()) {
2271        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2272        __ LoadFromOffset(kLoadWord, out, obj, offset);
2273      } else {
2274        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2275        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2276      }
2277      break;
2278    }
2279
2280    case Primitive::kPrimLong: {
2281      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2282      Location out = locations->Out();
2283      if (index.IsConstant()) {
2284        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2285        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2286      } else {
2287        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2288        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2289      }
2290      break;
2291    }
2292
2293    case Primitive::kPrimFloat:
2294    case Primitive::kPrimDouble:
2295      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2296      UNREACHABLE();
2297    case Primitive::kPrimVoid:
2298      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2299      UNREACHABLE();
2300  }
2301}
2302
2303void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2304  Primitive::Type value_type = instruction->GetComponentType();
2305  bool is_object = value_type == Primitive::kPrimNot;
2306  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2307      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
2308  if (is_object) {
2309    InvokeRuntimeCallingConvention calling_convention;
2310    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2311    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2312    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2313  } else {
2314    locations->SetInAt(0, Location::RequiresRegister());
2315    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2316    locations->SetInAt(2, Location::RequiresRegister());
2317  }
2318}
2319
2320void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2321  LocationSummary* locations = instruction->GetLocations();
2322  Register obj = locations->InAt(0).As<Register>();
2323  Location index = locations->InAt(1);
2324  Primitive::Type value_type = instruction->GetComponentType();
2325
2326  switch (value_type) {
2327    case Primitive::kPrimBoolean:
2328    case Primitive::kPrimByte: {
2329      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2330      Register value = locations->InAt(2).As<Register>();
2331      if (index.IsConstant()) {
2332        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2333        __ StoreToOffset(kStoreByte, value, obj, offset);
2334      } else {
2335        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2336        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2337      }
2338      break;
2339    }
2340
2341    case Primitive::kPrimShort:
2342    case Primitive::kPrimChar: {
2343      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2344      Register value = locations->InAt(2).As<Register>();
2345      if (index.IsConstant()) {
2346        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2347        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2348      } else {
2349        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2350        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2351      }
2352      break;
2353    }
2354
2355    case Primitive::kPrimInt: {
2356      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2357      Register value = locations->InAt(2).As<Register>();
2358      if (index.IsConstant()) {
2359        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2360        __ StoreToOffset(kStoreWord, value, obj, offset);
2361      } else {
2362        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2363        __ StoreToOffset(kStoreWord, value, IP, data_offset);
2364      }
2365      break;
2366    }
2367
2368    case Primitive::kPrimNot: {
2369      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2370      break;
2371    }
2372
2373    case Primitive::kPrimLong: {
2374      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2375      Location value = locations->InAt(2);
2376      if (index.IsConstant()) {
2377        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2378        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2379      } else {
2380        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2381        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2382      }
2383      break;
2384    }
2385
2386    case Primitive::kPrimFloat:
2387    case Primitive::kPrimDouble:
2388      LOG(FATAL) << "Unimplemented register type " << value_type;
2389      UNREACHABLE();
2390    case Primitive::kPrimVoid:
2391      LOG(FATAL) << "Unreachable type " << value_type;
2392      UNREACHABLE();
2393  }
2394}
2395
2396void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2397  LocationSummary* locations =
2398      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2399  locations->SetInAt(0, Location::RequiresRegister());
2400  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2401}
2402
2403void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2404  LocationSummary* locations = instruction->GetLocations();
2405  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2406  Register obj = locations->InAt(0).As<Register>();
2407  Register out = locations->Out().As<Register>();
2408  __ LoadFromOffset(kLoadWord, out, obj, offset);
2409}
2410
2411void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2412  LocationSummary* locations =
2413      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2414  locations->SetInAt(0, Location::RequiresRegister());
2415  locations->SetInAt(1, Location::RequiresRegister());
2416  if (instruction->HasUses()) {
2417    locations->SetOut(Location::SameAsFirstInput());
2418  }
2419}
2420
2421void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2422  LocationSummary* locations = instruction->GetLocations();
2423  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2424      instruction, locations->InAt(0), locations->InAt(1));
2425  codegen_->AddSlowPath(slow_path);
2426
2427  Register index = locations->InAt(0).As<Register>();
2428  Register length = locations->InAt(1).As<Register>();
2429
2430  __ cmp(index, ShifterOperand(length));
2431  __ b(slow_path->GetEntryLabel(), CS);
2432}
2433
2434void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
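      // Descriptive sketch (added comment; not in the original): `card` is loaded with the
      // card-table base kept in the Thread, `temp` becomes object >> kCardShift (the card
      // index), and the STRB writes the low byte of that base address into card_table[index].
      // ART biases the card-table base so that this byte equals the dirty-card value, which is
      // why no separate marker constant needs to be loaded.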
2435  Label is_null;
2436  __ CompareAndBranchIfZero(value, &is_null);
2437  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
2438  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
2439  __ strb(card, Address(card, temp));
2440  __ Bind(&is_null);
2441}
2442
2443void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
2444  temp->SetLocations(nullptr);
2445}
2446
2447void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
2448  // Nothing to do, this is driven by the code generator.
2449  UNUSED(temp);
2450}
2451
2452void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
2453  UNUSED(instruction);
2454  LOG(FATAL) << "Unreachable";
2455}
2456
2457void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
2458  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2459}
2460
2461void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2462  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2463}
2464
2465void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2466  HBasicBlock* block = instruction->GetBlock();
2467  if (block->GetLoopInformation() != nullptr) {
2468    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2469    // The back edge will generate the suspend check.
2470    return;
2471  }
2472  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2473    // The goto will generate the suspend check.
2474    return;
2475  }
2476  GenerateSuspendCheck(instruction, nullptr);
2477}
2478
2479void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
2480                                                       HBasicBlock* successor) {
2481  SuspendCheckSlowPathARM* slow_path =
2482      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
2483  codegen_->AddSlowPath(slow_path);
2484
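      // Note (added comment; not in the original): the halfword loaded below holds the thread's
      // flag bits, which are non-zero only when a suspend or checkpoint request is pending;
      // that is the only case in which the slow path is entered.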
2485  __ LoadFromOffset(
2486      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
2487  __ cmp(IP, ShifterOperand(0));
2488  // TODO: Figure out the branch offsets and use cbz/cbnz.
2489  if (successor == nullptr) {
2490    __ b(slow_path->GetEntryLabel(), NE);
2491    __ Bind(slow_path->GetReturnLabel());
2492  } else {
2493    __ b(codegen_->GetLabelOf(successor), EQ);
2494    __ b(slow_path->GetEntryLabel());
2495  }
2496}
2497
2498ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
2499  return codegen_->GetAssembler();
2500}
2501
2502void ParallelMoveResolverARM::EmitMove(size_t index) {
2503  MoveOperands* move = moves_.Get(index);
2504  Location source = move->GetSource();
2505  Location destination = move->GetDestination();
2506
2507  if (source.IsRegister()) {
2508    if (destination.IsRegister()) {
2509      __ Mov(destination.As<Register>(), source.As<Register>());
2510    } else {
2511      DCHECK(destination.IsStackSlot());
2512      __ StoreToOffset(kStoreWord, source.As<Register>(),
2513                       SP, destination.GetStackIndex());
2514    }
2515  } else if (source.IsStackSlot()) {
2516    if (destination.IsRegister()) {
2517      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
2518                        SP, source.GetStackIndex());
2519    } else {
2520      DCHECK(destination.IsStackSlot());
2521      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2522      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2523    }
2524  } else {
2525    DCHECK(source.IsConstant());
2526    DCHECK(source.GetConstant()->IsIntConstant());
2527    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
2528    if (destination.IsRegister()) {
2529      __ LoadImmediate(destination.As<Register>(), value);
2530    } else {
2531      DCHECK(destination.IsStackSlot());
2532      __ LoadImmediate(IP, value);
2533      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2534    }
2535  }
2536}
2537
2538void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
2539  __ Mov(IP, reg);
2540  __ LoadFromOffset(kLoadWord, reg, SP, mem);
2541  __ StoreToOffset(kStoreWord, IP, SP, mem);
2542}
2543
2544void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
2545  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
2546  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
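      // Descriptive note (added comment; not in the original): when no spare core register is
      // available, the scratch register is pushed by SpillScratch(), which moves SP down one
      // word; adding kArmWordSize to mem1/mem2 keeps the SP-relative offsets pointing at the
      // original stack slots.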
2547  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
2548                    SP, mem1 + stack_offset);
2549  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
2550  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
2551                   SP, mem2 + stack_offset);
2552  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
2553}
2554
2555void ParallelMoveResolverARM::EmitSwap(size_t index) {
2556  MoveOperands* move = moves_.Get(index);
2557  Location source = move->GetSource();
2558  Location destination = move->GetDestination();
2559
2560  if (source.IsRegister() && destination.IsRegister()) {
2561    DCHECK_NE(source.As<Register>(), IP);
2562    DCHECK_NE(destination.As<Register>(), IP);
2563    __ Mov(IP, source.As<Register>());
2564    __ Mov(source.As<Register>(), destination.As<Register>());
2565    __ Mov(destination.As<Register>(), IP);
2566  } else if (source.IsRegister() && destination.IsStackSlot()) {
2567    Exchange(source.As<Register>(), destination.GetStackIndex());
2568  } else if (source.IsStackSlot() && destination.IsRegister()) {
2569    Exchange(destination.As<Register>(), source.GetStackIndex());
2570  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2571    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2572  } else {
2573    LOG(FATAL) << "Unimplemented";
2574  }
2575}
2576
2577void ParallelMoveResolverARM::SpillScratch(int reg) {
2578  __ Push(static_cast<Register>(reg));
2579}
2580
2581void ParallelMoveResolverARM::RestoreScratch(int reg) {
2582  __ Pop(static_cast<Register>(reg));
2583}
2584
2585void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2586  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2587      ? LocationSummary::kCallOnSlowPath
2588      : LocationSummary::kNoCall;
2589  LocationSummary* locations =
2590      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2591  locations->SetOut(Location::RequiresRegister());
2592}
2593
2594void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
2595  Register out = cls->GetLocations()->Out().As<Register>();
2596  if (cls->IsReferrersClass()) {
2597    DCHECK(!cls->CanCallRuntime());
2598    DCHECK(!cls->MustGenerateClinitCheck());
2599    codegen_->LoadCurrentMethod(out);
2600    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
2601  } else {
2602    DCHECK(cls->CanCallRuntime());
2603    codegen_->LoadCurrentMethod(out);
2604    __ LoadFromOffset(
2605        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
2606    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
2607
2608    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2609        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2610    codegen_->AddSlowPath(slow_path);
2611    __ cmp(out, ShifterOperand(0));
2612    __ b(slow_path->GetEntryLabel(), EQ);
2613    if (cls->MustGenerateClinitCheck()) {
2614      GenerateClassInitializationCheck(slow_path, out);
2615    } else {
2616      __ Bind(slow_path->GetExitLabel());
2617    }
2618  }
2619}
2620
2621void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2622  LocationSummary* locations =
2623      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2624  locations->SetInAt(0, Location::RequiresRegister());
2625  if (check->HasUses()) {
2626    locations->SetOut(Location::SameAsFirstInput());
2627  }
2628}
2629
2630void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2631  // We assume the class is not null.
2632  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2633      check->GetLoadClass(), check, check->GetDexPc(), true);
2634  codegen_->AddSlowPath(slow_path);
2635  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2636}
2637
2638void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
2639    SlowPathCodeARM* slow_path, Register class_reg) {
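      // Note (added comment; not in the original): class status values are ordered, so the
      // signed less-than test sends any class whose status has not reached kStatusInitialized
      // to the slow path, which triggers class initialization.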
2640  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
2641  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
2642  __ b(slow_path->GetEntryLabel(), LT);
2643  // Even if the initialized flag is set, we may be in a situation where caches are not synced
2644  // properly. Therefore, we do a memory fence.
2645  __ dmb(ISH);
2646  __ Bind(slow_path->GetExitLabel());
2647}
2648
2649void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2650  LocationSummary* locations =
2651      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2652  locations->SetInAt(0, Location::RequiresRegister());
2653  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2654}
2655
2656void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2657  LocationSummary* locations = instruction->GetLocations();
2658  Register cls = locations->InAt(0).As<Register>();
2659  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2660
2661  switch (instruction->GetType()) {
2662    case Primitive::kPrimBoolean: {
2663      Register out = locations->Out().As<Register>();
2664      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2665      break;
2666    }
2667
2668    case Primitive::kPrimByte: {
2669      Register out = locations->Out().As<Register>();
2670      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2671      break;
2672    }
2673
2674    case Primitive::kPrimShort: {
2675      Register out = locations->Out().As<Register>();
2676      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2677      break;
2678    }
2679
2680    case Primitive::kPrimChar: {
2681      Register out = locations->Out().As<Register>();
2682      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2683      break;
2684    }
2685
2686    case Primitive::kPrimInt:
2687    case Primitive::kPrimNot: {
2688      Register out = locations->Out().As<Register>();
2689      __ LoadFromOffset(kLoadWord, out, cls, offset);
2690      break;
2691    }
2692
2693    case Primitive::kPrimLong: {
2694      // TODO: support volatile.
2695      Location out = locations->Out();
2696      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2697      break;
2698    }
2699
2700    case Primitive::kPrimFloat: {
2701      SRegister out = locations->Out().As<SRegister>();
2702      __ LoadSFromOffset(out, cls, offset);
2703      break;
2704    }
2705
2706    case Primitive::kPrimDouble: {
2707      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2708      __ LoadDFromOffset(out, cls, offset);
2709      break;
2710    }
2711
2712    case Primitive::kPrimVoid:
2713      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2714      UNREACHABLE();
2715  }
2716}
2717
2718void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2719  LocationSummary* locations =
2720      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2721  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2722  locations->SetInAt(0, Location::RequiresRegister());
2723  locations->SetInAt(1, Location::RequiresRegister());
2724  // Temporary registers for the write barrier.
2725  if (is_object_type) {
2726    locations->AddTemp(Location::RequiresRegister());
2727    locations->AddTemp(Location::RequiresRegister());
2728  }
2729}
2730
2731void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2732  LocationSummary* locations = instruction->GetLocations();
2733  Register cls = locations->InAt(0).As<Register>();
2734  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2735  Primitive::Type field_type = instruction->GetFieldType();
2736
2737  switch (field_type) {
2738    case Primitive::kPrimBoolean:
2739    case Primitive::kPrimByte: {
2740      Register value = locations->InAt(1).As<Register>();
2741      __ StoreToOffset(kStoreByte, value, cls, offset);
2742      break;
2743    }
2744
2745    case Primitive::kPrimShort:
2746    case Primitive::kPrimChar: {
2747      Register value = locations->InAt(1).As<Register>();
2748      __ StoreToOffset(kStoreHalfword, value, cls, offset);
2749      break;
2750    }
2751
2752    case Primitive::kPrimInt:
2753    case Primitive::kPrimNot: {
2754      Register value = locations->InAt(1).As<Register>();
2755      __ StoreToOffset(kStoreWord, value, cls, offset);
2756      if (field_type == Primitive::kPrimNot) {
2757        Register temp = locations->GetTemp(0).As<Register>();
2758        Register card = locations->GetTemp(1).As<Register>();
2759        codegen_->MarkGCCard(temp, card, cls, value);
2760      }
2761      break;
2762    }
2763
2764    case Primitive::kPrimLong: {
2765      Location value = locations->InAt(1);
2766      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
2767      break;
2768    }
2769
2770    case Primitive::kPrimFloat: {
2771      SRegister value = locations->InAt(1).As<SRegister>();
2772      __ StoreSToOffset(value, cls, offset);
2773      break;
2774    }
2775
2776    case Primitive::kPrimDouble: {
2777      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
2778      __ StoreDToOffset(value, cls, offset);
2779      break;
2780    }
2781
2782    case Primitive::kPrimVoid:
2783      LOG(FATAL) << "Unreachable type " << field_type;
2784      UNREACHABLE();
2785  }
2786}
2787
2788void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2789  LocationSummary* locations =
2790      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2791  locations->SetOut(Location::RequiresRegister());
2792}
2793
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().As<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(
      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

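// Loads the pending exception from the current thread (TR) and clears the field by
// storing zero through IP, so the thread no longer has a pending exception.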
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().As<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}

void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

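// A throw is handled entirely by the runtime: the exception object is passed in the
// first calling-convention register and pDeliverException unwinds to the catch
// handler instead of returning here.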
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
      ? LocationSummary::kNoCall
      : LocationSummary::kCallOnSlowPath;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

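// instanceof fast path: null is not an instance of anything, so it yields 0 without
// a class comparison. Otherwise the object's class is compared against `cls`. For a
// final class, equality is the complete answer; for other classes an inequality
// falls back to TypeCheckSlowPathARM, which performs the full subtype check.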
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Register cls = locations->InAt(1).As<Register>();
  Register out = locations->Out().As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}

void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

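// checkcast only has to trap on failure: a null reference always passes, and so does
// an exact class match. Everything else branches to TypeCheckSlowPathARM, which
// either returns (the cast is valid) or throws.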
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Register cls = locations->InAt(1).As<Register>();
  Register temp = locations->GetTemp(0).As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

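// monitor-enter and monitor-exit are both delegated to the runtime; the object to
// lock or unlock is passed in the first calling-convention register.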
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}

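// And, Or and Xor share their register constraints and code generation, so all the
// visitors below simply forward to HandleBitwiseOperation.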
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }

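// Both inputs must be in registers. For a long result the output is marked as
// overlapping the inputs: the low word of the result is written before the high
// words are read, so the output pair must not alias an input pair.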
void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(), output_overlaps);
}

void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

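// Integer operands need a single and/orr/eor. Long operands live in register pairs,
// so the same operation is emitted twice, once for the low words and once for the
// high words, e.g. for And:
//   and out_lo, first_lo, second_lo
//   and out_hi, first_hi, second_hi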
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    Register first = locations->InAt(0).As<Register>();
    Register second = locations->InAt(1).As<Register>();
    Register out = locations->Out().As<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}

}  // namespace arm
}  // namespace art