code_generator_arm.cc revision 647b96f29cb81832e698f863884fdba06674c9de
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
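// An even-numbered S register and the following odd one alias a single D register on VFP
// (S0/S1 form D0, S2/S3 form D1, ...), hence the division by two below.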
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static constexpr bool kExplicitStackOverflowCheck = false;
41
42static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
43static constexpr int kCurrentMethodStackOffset = 0;
44
45static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
46static constexpr size_t kRuntimeParameterCoreRegistersLength =
47    arraysize(kRuntimeParameterCoreRegisters);
48static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
49static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
50
51class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
52 public:
53  InvokeRuntimeCallingConvention()
54      : CallingConvention(kRuntimeParameterCoreRegisters,
55                          kRuntimeParameterCoreRegistersLength,
56                          kRuntimeParameterFpuRegisters,
57                          kRuntimeParameterFpuRegistersLength) {}
58
59 private:
60  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
61};
62
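// Shorthand used throughout the code generator: `__` expands to a call through the
// ArmAssembler, so that emitted instructions read like assembly (e.g. `__ blx(LR)`).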
63#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
64#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
65
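// Base class for out-of-line code emitted after the method body: fast-path code branches
// to GetEntryLabel() when the uncommon case is hit and, unless the slow path throws,
// jumps back to GetExitLabel().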
66class SlowPathCodeARM : public SlowPathCode {
67 public:
68  SlowPathCodeARM() : entry_label_(), exit_label_() {}
69
70  Label* GetEntryLabel() { return &entry_label_; }
71  Label* GetExitLabel() { return &exit_label_; }
72
73 private:
74  Label entry_label_;
75  Label exit_label_;
76
77  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
78};
79
80class NullCheckSlowPathARM : public SlowPathCodeARM {
81 public:
82  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
83
84  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
85    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
86    __ Bind(GetEntryLabel());
87    arm_codegen->InvokeRuntime(
88        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
89  }
90
91 private:
92  HNullCheck* const instruction_;
93  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
94};
95
96class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
97 public:
98  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
99
100  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
101    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
102    __ Bind(GetEntryLabel());
103    arm_codegen->InvokeRuntime(
104        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
105  }
106
107 private:
108  HDivZeroCheck* const instruction_;
109  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
110};
111
112class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
113 public:
114  StackOverflowCheckSlowPathARM() {}
115
116  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
117    __ Bind(GetEntryLabel());
118    __ LoadFromOffset(kLoadWord, PC, TR,
119        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
120  }
121
122 private:
123  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
124};
125
126class SuspendCheckSlowPathARM : public SlowPathCodeARM {
127 public:
128  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
129      : instruction_(instruction), successor_(successor) {}
130
131  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
132    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
133    __ Bind(GetEntryLabel());
134    codegen->SaveLiveRegisters(instruction_->GetLocations());
135    arm_codegen->InvokeRuntime(
136        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
137    codegen->RestoreLiveRegisters(instruction_->GetLocations());
138    if (successor_ == nullptr) {
139      __ b(GetReturnLabel());
140    } else {
141      __ b(arm_codegen->GetLabelOf(successor_));
142    }
143  }
144
145  Label* GetReturnLabel() {
146    DCHECK(successor_ == nullptr);
147    return &return_label_;
148  }
149
150 private:
151  HSuspendCheck* const instruction_;
152  // If not null, the block to branch to after the suspend check.
153  HBasicBlock* const successor_;
154
155  // If `successor_` is null, the label to branch to after the suspend check.
156  Label return_label_;
157
158  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
159};
160
161class BoundsCheckSlowPathARM : public SlowPathCodeARM {
162 public:
163  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
164                         Location index_location,
165                         Location length_location)
166      : instruction_(instruction),
167        index_location_(index_location),
168        length_location_(length_location) {}
169
170  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
171    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
172    __ Bind(GetEntryLabel());
173    InvokeRuntimeCallingConvention calling_convention;
174    arm_codegen->Move32(
175        Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
176    arm_codegen->Move32(
177        Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
178    arm_codegen->InvokeRuntime(
179        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
180  }
181
182 private:
183  HBoundsCheck* const instruction_;
184  const Location index_location_;
185  const Location length_location_;
186
187  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
188};
189
190class LoadClassSlowPathARM : public SlowPathCodeARM {
191 public:
192  LoadClassSlowPathARM(HLoadClass* cls,
193                       HInstruction* at,
194                       uint32_t dex_pc,
195                       bool do_clinit)
196      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
197    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
198  }
199
200  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
201    LocationSummary* locations = at_->GetLocations();
202
203    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
204    __ Bind(GetEntryLabel());
205    codegen->SaveLiveRegisters(locations);
206
207    InvokeRuntimeCallingConvention calling_convention;
208    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
209    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
210    int32_t entry_point_offset = do_clinit_
211        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
212        : QUICK_ENTRY_POINT(pInitializeType);
213    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
214
215    // Move the class to the desired location.
216    Location out = locations->Out();
217    if (out.IsValid()) {
218      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
219      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
220    }
221    codegen->RestoreLiveRegisters(locations);
222    __ b(GetExitLabel());
223  }
224
225 private:
226  // The class this slow path will load.
227  HLoadClass* const cls_;
228
229  // The instruction at which this slow path is invoked
230  // (either the load class itself or an initialization check).
231  HInstruction* const at_;
232
233  // The dex PC of `at_`.
234  const uint32_t dex_pc_;
235
236  // Whether to initialize the class.
237  const bool do_clinit_;
238
239  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
240};
241
242class LoadStringSlowPathARM : public SlowPathCodeARM {
243 public:
244  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
245
246  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
247    LocationSummary* locations = instruction_->GetLocations();
248    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
249
250    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
251    __ Bind(GetEntryLabel());
252    codegen->SaveLiveRegisters(locations);
253
254    InvokeRuntimeCallingConvention calling_convention;
255    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
256    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
257    arm_codegen->InvokeRuntime(
258        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
259    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
260
261    codegen->RestoreLiveRegisters(locations);
262    __ b(GetExitLabel());
263  }
264
265 private:
266  HLoadString* const instruction_;
267
268  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
269};
270
271class TypeCheckSlowPathARM : public SlowPathCodeARM {
272 public:
273  explicit TypeCheckSlowPathARM(HTypeCheck* instruction, Location object_class)
274      : instruction_(instruction),
275        object_class_(object_class) {}
276
277  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
278    LocationSummary* locations = instruction_->GetLocations();
279    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
280
281    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
282    __ Bind(GetEntryLabel());
283    codegen->SaveLiveRegisters(locations);
284
285    // We're moving two locations to locations that could overlap, so we need a parallel
286    // move resolver.
287    InvokeRuntimeCallingConvention calling_convention;
288    MoveOperands move1(locations->InAt(1),
289                       Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
290                       nullptr);
291    MoveOperands move2(object_class_,
292                       Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
293                       nullptr);
294    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
295    parallel_move.AddMove(&move1);
296    parallel_move.AddMove(&move2);
297    arm_codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
298
299    arm_codegen->InvokeRuntime(
300        QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, instruction_->GetDexPc());
301    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
302
303    codegen->RestoreLiveRegisters(locations);
304    __ b(GetExitLabel());
305  }
306
307 private:
308  HTypeCheck* const instruction_;
309  const Location object_class_;
310
311  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
312};
313
314#undef __
315
317#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
318
319inline Condition ARMCondition(IfCondition cond) {
320  switch (cond) {
321    case kCondEQ: return EQ;
322    case kCondNE: return NE;
323    case kCondLT: return LT;
324    case kCondLE: return LE;
325    case kCondGT: return GT;
326    case kCondGE: return GE;
327    default:
328      LOG(FATAL) << "Unknown if condition";
329  }
330  return EQ;        // Unreachable.
331}
332
333inline Condition ARMOppositeCondition(IfCondition cond) {
334  switch (cond) {
335    case kCondEQ: return NE;
336    case kCondNE: return EQ;
337    case kCondLT: return GE;
338    case kCondLE: return GT;
339    case kCondGT: return LE;
340    case kCondGE: return LT;
341    default:
342      LOG(FATAL) << "Unknown if condition";
343  }
344  return EQ;        // Unreachable.
345}
346
347void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
348  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
349}
350
351void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
352  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
353}
354
355size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
356  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
357  return kArmWordSize;
358}
359
360size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
361  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
362  return kArmWordSize;
363}
364
365CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
366    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
367      block_labels_(graph->GetArena(), 0),
368      location_builder_(graph, this),
369      instruction_visitor_(graph, this),
370      move_resolver_(graph->GetArena(), this),
371      assembler_(true) {}
372
373size_t CodeGeneratorARM::FrameEntrySpillSize() const {
374  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
375}
376
377Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
378  switch (type) {
379    case Primitive::kPrimLong: {
380      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
381      ArmManagedRegister pair =
382          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
383      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
384      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
385
386      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
387      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
388      UpdateBlockedPairRegisters();
389      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
390    }
391
392    case Primitive::kPrimByte:
393    case Primitive::kPrimBoolean:
394    case Primitive::kPrimChar:
395    case Primitive::kPrimShort:
396    case Primitive::kPrimInt:
397    case Primitive::kPrimNot: {
398      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
399      // Block all register pairs that contain `reg`.
400      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
401        ArmManagedRegister current =
402            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
403        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
404          blocked_register_pairs_[i] = true;
405        }
406      }
407      return Location::RegisterLocation(reg);
408    }
409
410    case Primitive::kPrimFloat: {
411      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
412      return Location::FpuRegisterLocation(reg);
413    }
414
415    case Primitive::kPrimDouble: {
416      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
417      DCHECK_EQ(reg % 2, 0);
418      return Location::FpuRegisterPairLocation(reg, reg + 1);
419    }
420
421    case Primitive::kPrimVoid:
422      LOG(FATAL) << "Unreachable type " << type;
423  }
424
425  return Location();
426}
427
428void CodeGeneratorARM::SetupBlockedRegisters() const {
429  // Don't allocate R1_R2, the register pair used for Dalvik-style argument passing.
430  blocked_register_pairs_[R1_R2] = true;
431
432  // Stack register, LR and PC are always reserved.
433  blocked_core_registers_[SP] = true;
434  blocked_core_registers_[LR] = true;
435  blocked_core_registers_[PC] = true;
436
437  // Reserve thread register.
438  blocked_core_registers_[TR] = true;
439
440  // Reserve temp register.
441  blocked_core_registers_[IP] = true;
442
443  // TODO: We currently don't use Quick's callee saved registers.
444  // We always save and restore R6 and R7 to make sure we can use three
445  // register pairs for long operations.
446  blocked_core_registers_[R4] = true;
447  blocked_core_registers_[R5] = true;
448  blocked_core_registers_[R8] = true;
449  blocked_core_registers_[R10] = true;
450  blocked_core_registers_[R11] = true;
451
452  blocked_fpu_registers_[S16] = true;
453  blocked_fpu_registers_[S17] = true;
454  blocked_fpu_registers_[S18] = true;
455  blocked_fpu_registers_[S19] = true;
456  blocked_fpu_registers_[S20] = true;
457  blocked_fpu_registers_[S21] = true;
458  blocked_fpu_registers_[S22] = true;
459  blocked_fpu_registers_[S23] = true;
460  blocked_fpu_registers_[S24] = true;
461  blocked_fpu_registers_[S25] = true;
462  blocked_fpu_registers_[S26] = true;
463  blocked_fpu_registers_[S27] = true;
464  blocked_fpu_registers_[S28] = true;
465  blocked_fpu_registers_[S29] = true;
466  blocked_fpu_registers_[S30] = true;
467  blocked_fpu_registers_[S31] = true;
468
469  UpdateBlockedPairRegisters();
470}
471
472void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
473  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
474    ArmManagedRegister current =
475        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
476    if (blocked_core_registers_[current.AsRegisterPairLow()]
477        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
478      blocked_register_pairs_[i] = true;
479    }
480  }
481}
482
483InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
484      : HGraphVisitor(graph),
485        assembler_(codegen->GetAssembler()),
486        codegen_(codegen) {}
487
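// Rough sketch of the frame built below (offsets relative to the final SP; the exact slot
// assignment comes from the register allocator):
//   [frame_size - 4]   saved LR (return PC)
//   [frame_size - 8]   saved R7
//   [frame_size - 12]  saved R6
//   ...                spill slots and outgoing arguments
//   [0]                current ArtMethod*, copied from R0 (kCurrentMethodStackOffset)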
488void CodeGeneratorARM::GenerateFrameEntry() {
489  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
490  if (!skip_overflow_check) {
491    if (kExplicitStackOverflowCheck) {
492      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
493      AddSlowPath(slow_path);
494
495      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
496      __ cmp(SP, ShifterOperand(IP));
497      __ b(slow_path->GetEntryLabel(), CC);
498    } else {
499      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
500      __ LoadFromOffset(kLoadWord, IP, IP, 0);
501      RecordPcInfo(nullptr, 0);
502    }
503  }
504
505  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
506  __ PushList(1 << LR | 1 << R6 | 1 << R7);
507
508  // The return PC has already been pushed on the stack.
509  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
510  __ StoreToOffset(kStoreWord, R0, SP, 0);
511}
512
513void CodeGeneratorARM::GenerateFrameExit() {
514  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
515  __ PopList(1 << PC | 1 << R6 | 1 << R7);
516}
517
518void CodeGeneratorARM::Bind(HBasicBlock* block) {
519  __ Bind(GetLabelOf(block));
520}
521
522Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
523  switch (load->GetType()) {
524    case Primitive::kPrimLong:
525    case Primitive::kPrimDouble:
526      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
527      break;
528
529    case Primitive::kPrimInt:
530    case Primitive::kPrimNot:
531    case Primitive::kPrimFloat:
532      return Location::StackSlot(GetStackSlot(load->GetLocal()));
533
534    case Primitive::kPrimBoolean:
535    case Primitive::kPrimByte:
536    case Primitive::kPrimChar:
537    case Primitive::kPrimShort:
538    case Primitive::kPrimVoid:
539      LOG(FATAL) << "Unexpected type " << load->GetType();
540  }
541
542  LOG(FATAL) << "Unreachable";
543  return Location();
544}
545
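// Assigns each dex-level argument to a register or a stack slot. Core and FP registers
// come from separate pools (gp_index_ vs. float_index_/double_index_), while stack_index_
// advances for every argument so that overflow arguments land in their canonical
// outgoing stack slot.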
546Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
547  switch (type) {
548    case Primitive::kPrimBoolean:
549    case Primitive::kPrimByte:
550    case Primitive::kPrimChar:
551    case Primitive::kPrimShort:
552    case Primitive::kPrimInt:
553    case Primitive::kPrimNot: {
554      uint32_t index = gp_index_++;
555      uint32_t stack_index = stack_index_++;
556      if (index < calling_convention.GetNumberOfRegisters()) {
557        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
558      } else {
559        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
560      }
561    }
562
563    case Primitive::kPrimLong: {
564      uint32_t index = gp_index_;
565      uint32_t stack_index = stack_index_;
566      gp_index_ += 2;
567      stack_index_ += 2;
568      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
569        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
570            calling_convention.GetRegisterPairAt(index));
571        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
572      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
573        return Location::QuickParameter(index, stack_index);
574      } else {
575        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
576      }
577    }
578
579    case Primitive::kPrimFloat: {
580      uint32_t stack_index = stack_index_++;
581      if (float_index_ % 2 == 0) {
582        float_index_ = std::max(double_index_, float_index_);
583      }
584      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
585        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
586      } else {
587        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
588      }
589    }
590
591    case Primitive::kPrimDouble: {
592      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
593      uint32_t stack_index = stack_index_;
594      stack_index_ += 2;
595      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
596        uint32_t index = double_index_;
597        double_index_ += 2;
598        return Location::FpuRegisterPairLocation(
599          calling_convention.GetFpuRegisterAt(index),
600          calling_convention.GetFpuRegisterAt(index + 1));
601      } else {
602        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
603      }
604    }
605
606    case Primitive::kPrimVoid:
607      LOG(FATAL) << "Unexpected parameter type " << type;
608      break;
609  }
610  return Location();
611}
612
613Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
614  switch (type) {
615    case Primitive::kPrimBoolean:
616    case Primitive::kPrimByte:
617    case Primitive::kPrimChar:
618    case Primitive::kPrimShort:
619    case Primitive::kPrimInt:
620    case Primitive::kPrimNot: {
621      return Location::RegisterLocation(R0);
622    }
623
624    case Primitive::kPrimFloat: {
625      return Location::FpuRegisterLocation(S0);
626    }
627
628    case Primitive::kPrimLong: {
629      return Location::RegisterPairLocation(R0, R1);
630    }
631
632    case Primitive::kPrimDouble: {
633      return Location::FpuRegisterPairLocation(S0, S1);
634    }
635
636    case Primitive::kPrimVoid:
637      return Location();
638  }
639  UNREACHABLE();
640  return Location();
641}
642
643void CodeGeneratorARM::Move32(Location destination, Location source) {
644  if (source.Equals(destination)) {
645    return;
646  }
647  if (destination.IsRegister()) {
648    if (source.IsRegister()) {
649      __ Mov(destination.As<Register>(), source.As<Register>());
650    } else if (source.IsFpuRegister()) {
651      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
652    } else {
653      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
654    }
655  } else if (destination.IsFpuRegister()) {
656    if (source.IsRegister()) {
657      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
658    } else if (source.IsFpuRegister()) {
659      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
660    } else {
661      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
662    }
663  } else {
664    DCHECK(destination.IsStackSlot());
665    if (source.IsRegister()) {
666      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
667    } else if (source.IsFpuRegister()) {
668      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
669    } else {
670      DCHECK(source.IsStackSlot());
671      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
672      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
673    }
674  }
675}
676
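// Note: a QuickParameter location models a long split between the last core argument
// register (low word) and a stack slot (high word); incoming values read the high word
// from the caller's frame, outgoing values write it to this frame's argument area.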
677void CodeGeneratorARM::Move64(Location destination, Location source) {
678  if (source.Equals(destination)) {
679    return;
680  }
681  if (destination.IsRegisterPair()) {
682    if (source.IsRegisterPair()) {
683      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
684      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
685    } else if (source.IsFpuRegister()) {
686      UNIMPLEMENTED(FATAL);
687    } else if (source.IsQuickParameter()) {
688      uint16_t register_index = source.GetQuickParameterRegisterIndex();
689      uint16_t stack_index = source.GetQuickParameterStackIndex();
690      InvokeDexCallingConvention calling_convention;
691      __ Mov(destination.AsRegisterPairLow<Register>(),
692             calling_convention.GetRegisterAt(register_index));
693      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
694             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
695    } else {
696      DCHECK(source.IsDoubleStackSlot());
697      if (destination.AsRegisterPairLow<Register>() == R1) {
698        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
699        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
700        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
701      } else {
702        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
703                          SP, source.GetStackIndex());
704      }
705    }
706  } else if (destination.IsFpuRegisterPair()) {
707    if (source.IsDoubleStackSlot()) {
708      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
709                         SP,
710                         source.GetStackIndex());
711    } else {
712      UNIMPLEMENTED(FATAL);
713    }
714  } else if (destination.IsQuickParameter()) {
715    InvokeDexCallingConvention calling_convention;
716    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
717    uint16_t stack_index = destination.GetQuickParameterStackIndex();
718    if (source.IsRegisterPair()) {
719      __ Mov(calling_convention.GetRegisterAt(register_index),
720             source.AsRegisterPairLow<Register>());
721      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
722             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
723    } else if (source.IsFpuRegister()) {
724      UNIMPLEMENTED(FATAL);
725    } else {
726      DCHECK(source.IsDoubleStackSlot());
727      __ LoadFromOffset(
728          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
729      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
730      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
731    }
732  } else {
733    DCHECK(destination.IsDoubleStackSlot());
734    if (source.IsRegisterPair()) {
735      if (source.AsRegisterPairLow<Register>() == R1) {
736        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
737        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
738        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
739      } else {
740        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
741                         SP, destination.GetStackIndex());
742      }
743    } else if (source.IsQuickParameter()) {
744      InvokeDexCallingConvention calling_convention;
745      uint16_t register_index = source.GetQuickParameterRegisterIndex();
746      uint16_t stack_index = source.GetQuickParameterStackIndex();
747      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
748             SP, destination.GetStackIndex());
749      __ LoadFromOffset(kLoadWord, R0,
750             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
751      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
752    } else if (source.IsFpuRegisterPair()) {
753      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
754                        SP,
755                        destination.GetStackIndex());
756    } else {
757      DCHECK(source.IsDoubleStackSlot());
758      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
759      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
760      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
761      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
762    }
763  }
764}
765
766void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
767  LocationSummary* locations = instruction->GetLocations();
768  if (locations != nullptr && locations->Out().Equals(location)) {
769    return;
770  }
771
772  if (instruction->IsIntConstant()) {
773    int32_t value = instruction->AsIntConstant()->GetValue();
774    if (location.IsRegister()) {
775      __ LoadImmediate(location.As<Register>(), value);
776    } else {
777      DCHECK(location.IsStackSlot());
778      __ LoadImmediate(IP, value);
779      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
780    }
781  } else if (instruction->IsLongConstant()) {
782    int64_t value = instruction->AsLongConstant()->GetValue();
783    if (location.IsRegisterPair()) {
784      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
785      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
786    } else {
787      DCHECK(location.IsDoubleStackSlot());
788      __ LoadImmediate(IP, Low32Bits(value));
789      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
790      __ LoadImmediate(IP, High32Bits(value));
791      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
792    }
793  } else if (instruction->IsLoadLocal()) {
794    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
795    switch (instruction->GetType()) {
796      case Primitive::kPrimBoolean:
797      case Primitive::kPrimByte:
798      case Primitive::kPrimChar:
799      case Primitive::kPrimShort:
800      case Primitive::kPrimInt:
801      case Primitive::kPrimNot:
802      case Primitive::kPrimFloat:
803        Move32(location, Location::StackSlot(stack_slot));
804        break;
805
806      case Primitive::kPrimLong:
807      case Primitive::kPrimDouble:
808        Move64(location, Location::DoubleStackSlot(stack_slot));
809        break;
810
811      default:
812        LOG(FATAL) << "Unexpected type " << instruction->GetType();
813    }
814  } else if (instruction->IsTemporary()) {
815    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
816    Move32(location, temp_location);
817  } else {
818    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
819    switch (instruction->GetType()) {
820      case Primitive::kPrimBoolean:
821      case Primitive::kPrimByte:
822      case Primitive::kPrimChar:
823      case Primitive::kPrimShort:
824      case Primitive::kPrimNot:
825      case Primitive::kPrimInt:
826      case Primitive::kPrimFloat:
827        Move32(location, locations->Out());
828        break;
829
830      case Primitive::kPrimLong:
831      case Primitive::kPrimDouble:
832        Move64(location, locations->Out());
833        break;
834
835      default:
836        LOG(FATAL) << "Unexpected type " << instruction->GetType();
837    }
838  }
839}
840
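// Calls a quick runtime entrypoint: the target is loaded from the entrypoint table of the
// current thread (TR) and invoked via blx, and the native PC is recorded so the runtime
// can map it back to `dex_pc`.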
841void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
842                                     HInstruction* instruction,
843                                     uint32_t dex_pc) {
844  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
845  __ blx(LR);
846  RecordPcInfo(instruction, dex_pc);
847  DCHECK(instruction->IsSuspendCheck()
848      || instruction->IsBoundsCheck()
849      || instruction->IsNullCheck()
850      || instruction->IsDivZeroCheck()
851      || !IsLeafMethod());
852}
853
854void LocationsBuilderARM::VisitGoto(HGoto* got) {
855  got->SetLocations(nullptr);
856}
857
858void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
859  HBasicBlock* successor = got->GetSuccessor();
860  DCHECK(!successor->IsExitBlock());
861
862  HBasicBlock* block = got->GetBlock();
863  HInstruction* previous = got->GetPrevious();
864
865  HLoopInformation* info = block->GetLoopInformation();
866  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
867    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
868    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
869    return;
870  }
871
872  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
873    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
874  }
875  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
876    __ b(codegen_->GetLabelOf(successor));
877  }
878}
879
880void LocationsBuilderARM::VisitExit(HExit* exit) {
881  exit->SetLocations(nullptr);
882}
883
884void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
885  UNUSED(exit);
886  if (kIsDebugBuild) {
887    __ Comment("Unreachable");
888    __ bkpt(0);
889  }
890}
891
892void LocationsBuilderARM::VisitIf(HIf* if_instr) {
893  LocationSummary* locations =
894      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
895  HInstruction* cond = if_instr->InputAt(0);
896  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
897    locations->SetInAt(0, Location::RequiresRegister());
898  }
899}
900
901void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
902  HInstruction* cond = if_instr->InputAt(0);
903  if (cond->IsIntConstant()) {
904    // Constant condition, statically compared against 1.
905    int32_t cond_value = cond->AsIntConstant()->GetValue();
906    if (cond_value == 1) {
907      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
908                                     if_instr->IfTrueSuccessor())) {
909        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
910      }
911      return;
912    } else {
913      DCHECK_EQ(cond_value, 0);
914    }
915  } else {
916    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
917      // The condition has been materialized; compare its output against 0.
918      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
919      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
920             ShifterOperand(0));
921      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
922    } else {
923      // The condition has not been materialized; use its inputs as the
924      // comparison and its condition code as the branch condition.
925      LocationSummary* locations = cond->GetLocations();
926      if (locations->InAt(1).IsRegister()) {
927        __ cmp(locations->InAt(0).As<Register>(),
928               ShifterOperand(locations->InAt(1).As<Register>()));
929      } else {
930        DCHECK(locations->InAt(1).IsConstant());
931        int32_t value =
932            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
933        ShifterOperand operand;
934        if (ShifterOperand::CanHoldArm(value, &operand)) {
935          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
936        } else {
937          Register temp = IP;
938          __ LoadImmediate(temp, value);
939          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
940        }
941      }
942      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
943           ARMCondition(cond->AsCondition()->GetCondition()));
944    }
945  }
946  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
947                                 if_instr->IfFalseSuccessor())) {
948    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
949  }
950}
951
952
953void LocationsBuilderARM::VisitCondition(HCondition* comp) {
954  LocationSummary* locations =
955      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
956  locations->SetInAt(0, Location::RequiresRegister());
957  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
958  if (comp->NeedsMaterialization()) {
959    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
960  }
961}
962
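// Materializes the boolean result of a comparison: after the cmp, the IT block (emitted
// when assembling for Thumb-2) predicates the two movs that write 1 or 0 into the output.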
963void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
964  if (!comp->NeedsMaterialization()) return;
965
966  LocationSummary* locations = comp->GetLocations();
967  if (locations->InAt(1).IsRegister()) {
968    __ cmp(locations->InAt(0).As<Register>(),
969           ShifterOperand(locations->InAt(1).As<Register>()));
970  } else {
971    DCHECK(locations->InAt(1).IsConstant());
972    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
973    ShifterOperand operand;
974    if (ShifterOperand::CanHoldArm(value, &operand)) {
975      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
976    } else {
977      Register temp = IP;
978      __ LoadImmediate(temp, value);
979      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
980    }
981  }
982  __ it(ARMCondition(comp->GetCondition()), kItElse);
983  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
984         ARMCondition(comp->GetCondition()));
985  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
986         ARMOppositeCondition(comp->GetCondition()));
987}
988
989void LocationsBuilderARM::VisitEqual(HEqual* comp) {
990  VisitCondition(comp);
991}
992
993void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
994  VisitCondition(comp);
995}
996
997void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
998  VisitCondition(comp);
999}
1000
1001void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1002  VisitCondition(comp);
1003}
1004
1005void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1006  VisitCondition(comp);
1007}
1008
1009void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1010  VisitCondition(comp);
1011}
1012
1013void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1014  VisitCondition(comp);
1015}
1016
1017void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1018  VisitCondition(comp);
1019}
1020
1021void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1022  VisitCondition(comp);
1023}
1024
1025void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1026  VisitCondition(comp);
1027}
1028
1029void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1030  VisitCondition(comp);
1031}
1032
1033void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1034  VisitCondition(comp);
1035}
1036
1037void LocationsBuilderARM::VisitLocal(HLocal* local) {
1038  local->SetLocations(nullptr);
1039}
1040
1041void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1042  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1043}
1044
1045void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1046  load->SetLocations(nullptr);
1047}
1048
1049void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1050  // Nothing to do, this is driven by the code generator.
1051  UNUSED(load);
1052}
1053
1054void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1055  LocationSummary* locations =
1056      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1057  switch (store->InputAt(1)->GetType()) {
1058    case Primitive::kPrimBoolean:
1059    case Primitive::kPrimByte:
1060    case Primitive::kPrimChar:
1061    case Primitive::kPrimShort:
1062    case Primitive::kPrimInt:
1063    case Primitive::kPrimNot:
1064    case Primitive::kPrimFloat:
1065      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1066      break;
1067
1068    case Primitive::kPrimLong:
1069    case Primitive::kPrimDouble:
1070      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1071      break;
1072
1073    default:
1074      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1075  }
1076}
1077
1078void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1079  UNUSED(store);
1080}
1081
1082void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1083  LocationSummary* locations =
1084      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1085  locations->SetOut(Location::ConstantLocation(constant));
1086}
1087
1088void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1089  // Will be generated at use site.
1090  UNUSED(constant);
1091}
1092
1093void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1094  LocationSummary* locations =
1095      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1096  locations->SetOut(Location::ConstantLocation(constant));
1097}
1098
1099void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1100  // Will be generated at use site.
1101  UNUSED(constant);
1102}
1103
1104void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1105  LocationSummary* locations =
1106      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1107  locations->SetOut(Location::ConstantLocation(constant));
1108}
1109
1110void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1111  // Will be generated at use site.
1112  UNUSED(constant);
1113}
1114
1115void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1116  LocationSummary* locations =
1117      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1118  locations->SetOut(Location::ConstantLocation(constant));
1119}
1120
1121void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1122  // Will be generated at use site.
1123  UNUSED(constant);
1124}
1125
1126void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1127  ret->SetLocations(nullptr);
1128}
1129
1130void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1131  UNUSED(ret);
1132  codegen_->GenerateFrameExit();
1133}
1134
1135void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1136  LocationSummary* locations =
1137      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1138  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1139}
1140
1141void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1142  UNUSED(ret);
1143  codegen_->GenerateFrameExit();
1144}
1145
1146void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1147  HandleInvoke(invoke);
1148}
1149
1150void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1151  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1152}
1153
1154void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1155  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1156
1157  // TODO: Implement all kinds of calls:
1158  // 1) boot -> boot
1159  // 2) app -> boot
1160  // 3) app -> app
1161  //
1162  // Currently we implement the app -> app logic, which looks the callee up in the dex cache of resolved methods.
1163
1164  // temp = method;
1165  codegen_->LoadCurrentMethod(temp);
1166  // temp = temp->dex_cache_resolved_methods_;
1167  __ LoadFromOffset(
1168      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1169  // temp = temp[index_in_cache]
1170  __ LoadFromOffset(
1171      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
1172  // LR = temp[offset_of_quick_compiled_code]
1173  __ LoadFromOffset(kLoadWord, LR, temp,
1174                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1175  // LR()
1176  __ blx(LR);
1177
1178  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1179  DCHECK(!codegen_->IsLeafMethod());
1180}
1181
1182void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1183  LocationSummary* locations =
1184      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1185  locations->AddTemp(Location::RegisterLocation(R0));
1186
1187  InvokeDexCallingConventionVisitor calling_convention_visitor;
1188  for (size_t i = 0; i < invoke->InputCount(); i++) {
1189    HInstruction* input = invoke->InputAt(i);
1190    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1191  }
1192
1193  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1194}
1195
1196void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1197  HandleInvoke(invoke);
1198}
1199
1200void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1201  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1202  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1203          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1204  LocationSummary* locations = invoke->GetLocations();
1205  Location receiver = locations->InAt(0);
1206  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1207  // temp = object->GetClass();
1208  if (receiver.IsStackSlot()) {
1209    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1210    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1211  } else {
1212    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1213  }
1214  // temp = temp->GetMethodAt(method_offset);
1215  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1216  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1217  // LR = temp->GetEntryPoint();
1218  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1219  // LR();
1220  __ blx(LR);
1221  DCHECK(!codegen_->IsLeafMethod());
1222  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1223}
1224
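// Interface calls dispatch through the class's embedded IMT. The interface method's dex
// method index is passed in R12 (the "hidden argument") so the callee-side stub can still
// identify the intended method when several interface methods share an IMT slot.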
1225void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1226  HandleInvoke(invoke);
1227  // Add the hidden argument.
1228  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1229}
1230
1231void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1232  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1233  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1234  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1235          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1236  LocationSummary* locations = invoke->GetLocations();
1237  Location receiver = locations->InAt(0);
1238  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1239
1240  // Set the hidden argument.
1241  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).As<Register>(), invoke->GetDexMethodIndex());
1242
1243  // temp = object->GetClass();
1244  if (receiver.IsStackSlot()) {
1245    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1246    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1247  } else {
1248    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1249  }
1250  // temp = temp->GetImtEntryAt(method_offset);
1251  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1252  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1253  // LR = temp->GetEntryPoint();
1254  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1255  // LR();
1256  __ blx(LR);
1257  DCHECK(!codegen_->IsLeafMethod());
1258  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1259}
1260
1261void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1262  LocationSummary* locations =
1263      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1264  switch (neg->GetResultType()) {
1265    case Primitive::kPrimInt:
1266    case Primitive::kPrimLong: {
1267      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1268      locations->SetInAt(0, Location::RequiresRegister());
1269      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1270      break;
1271    }
1272
1273    case Primitive::kPrimFloat:
1274    case Primitive::kPrimDouble:
1275      locations->SetInAt(0, Location::RequiresFpuRegister());
1276      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1277      break;
1278
1279    default:
1280      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1281  }
1282}
1283
1284void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1285  LocationSummary* locations = neg->GetLocations();
1286  Location out = locations->Out();
1287  Location in = locations->InAt(0);
1288  switch (neg->GetResultType()) {
1289    case Primitive::kPrimInt:
1290      DCHECK(in.IsRegister());
1291      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
1292      break;
1293
1294    case Primitive::kPrimLong:
1295      DCHECK(in.IsRegisterPair());
1296      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1297      __ rsbs(out.AsRegisterPairLow<Register>(),
1298              in.AsRegisterPairLow<Register>(),
1299              ShifterOperand(0));
1300      // We cannot emit an RSC (Reverse Subtract with Carry)
1301      // instruction here, as it does not exist in the Thumb-2
1302      // instruction set.  We use the following approach
1303      // using SBC and SUB instead.
1304      //
1305      // out.hi = -C
1306      __ sbc(out.AsRegisterPairHigh<Register>(),
1307             out.AsRegisterPairHigh<Register>(),
1308             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1309      // out.hi = out.hi - in.hi
1310      __ sub(out.AsRegisterPairHigh<Register>(),
1311             out.AsRegisterPairHigh<Register>(),
1312             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1313      break;
1314
1315    case Primitive::kPrimFloat:
1316      DCHECK(in.IsFpuRegister());
1317      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
1318      break;
1319
1320    case Primitive::kPrimDouble:
1321      DCHECK(in.IsFpuRegisterPair());
1322      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1323               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1324      break;
1325
1326    default:
1327      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1328  }
1329}
1330
1331void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1332  LocationSummary* locations =
1333      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
1334  Primitive::Type result_type = conversion->GetResultType();
1335  Primitive::Type input_type = conversion->GetInputType();
1336  switch (result_type) {
1337    case Primitive::kPrimInt:
1338      switch (input_type) {
1339        case Primitive::kPrimLong:
1340          // long-to-int conversion.
1341          locations->SetInAt(0, Location::Any());
1342          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1343          break;
1344
1345        case Primitive::kPrimFloat:
1346        case Primitive::kPrimDouble:
1347          LOG(FATAL) << "Type conversion from " << input_type
1348                     << " to " << result_type << " not yet implemented";
1349          break;
1350
1351        default:
1352          LOG(FATAL) << "Unexpected type conversion from " << input_type
1353                     << " to " << result_type;
1354      }
1355      break;
1356
1357    case Primitive::kPrimLong:
1358      switch (input_type) {
1359        case Primitive::kPrimByte:
1360        case Primitive::kPrimShort:
1361        case Primitive::kPrimInt:
1362        case Primitive::kPrimChar:
1363          // int-to-long conversion.
1364          locations->SetInAt(0, Location::RequiresRegister());
1365          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1366          break;
1367
1368        case Primitive::kPrimFloat:
1369        case Primitive::kPrimDouble:
1370          LOG(FATAL) << "Type conversion from " << input_type << " to "
1371                     << result_type << " not yet implemented";
1372          break;
1373
1374        default:
1375          LOG(FATAL) << "Unexpected type conversion from " << input_type
1376                     << " to " << result_type;
1377      }
1378      break;
1379
1380    case Primitive::kPrimFloat:
1381    case Primitive::kPrimDouble:
1382      LOG(FATAL) << "Type conversion from " << input_type
1383                 << " to " << result_type << " not yet implemented";
1384      break;
1385
1386    default:
1387      LOG(FATAL) << "Unexpected type conversion from " << input_type
1388                 << " to " << result_type;
1389  }
1390}
1391
1392void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1393  LocationSummary* locations = conversion->GetLocations();
1394  Location out = locations->Out();
1395  Location in = locations->InAt(0);
1396  Primitive::Type result_type = conversion->GetResultType();
1397  Primitive::Type input_type = conversion->GetInputType();
1398  switch (result_type) {
1399    case Primitive::kPrimInt:
1400      switch (input_type) {
1401        case Primitive::kPrimLong:
1402          // long-to-int conversion.
1403          DCHECK(out.IsRegister());
1404          if (in.IsRegisterPair()) {
1405            __ Mov(out.As<Register>(), in.AsRegisterPairLow<Register>());
1406          } else if (in.IsDoubleStackSlot()) {
1407            __ LoadFromOffset(kLoadWord, out.As<Register>(), SP, in.GetStackIndex());
1408          } else {
1409            DCHECK(in.IsConstant());
1410            DCHECK(in.GetConstant()->IsLongConstant());
1411            __ LoadImmediate(out.As<Register>(),
1412                             Low32Bits(in.GetConstant()->AsLongConstant()->GetValue()));
1413          }
1414          break;
1415
1416        case Primitive::kPrimFloat:
1417        case Primitive::kPrimDouble:
1418          LOG(FATAL) << "Type conversion from " << input_type
1419                     << " to " << result_type << " not yet implemented";
1420          break;
1421
1422        default:
1423          LOG(FATAL) << "Unexpected type conversion from " << input_type
1424                     << " to " << result_type;
1425      }
1426      break;
1427
1428    case Primitive::kPrimLong:
1429      switch (input_type) {
1430        case Primitive::kPrimByte:
1431        case Primitive::kPrimShort:
1432        case Primitive::kPrimInt:
1433        case Primitive::kPrimChar:
1434          // int-to-long conversion.
1435          DCHECK(out.IsRegisterPair());
1436          DCHECK(in.IsRegister());
1437          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
1438          // Sign extension.
1439          __ Asr(out.AsRegisterPairHigh<Register>(),
1440                 out.AsRegisterPairLow<Register>(),
1441                 31);
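          // ASR by 31 replicates the sign bit into the high word, e.g. an input
          // of 0x80000000 produces the pair 0xFFFFFFFF:0x80000000.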
1442          break;
1443
1444        case Primitive::kPrimFloat:
1445        case Primitive::kPrimDouble:
1446          LOG(FATAL) << "Type conversion from " << input_type << " to "
1447                     << result_type << " not yet implemented";
1448          break;
1449
1450        default:
1451          LOG(FATAL) << "Unexpected type conversion from " << input_type
1452                     << " to " << result_type;
1453      }
1454      break;
1455
1456    case Primitive::kPrimFloat:
1457    case Primitive::kPrimDouble:
1458      LOG(FATAL) << "Type conversion from " << input_type
1459                 << " to " << result_type << " not yet implemented";
1460      break;
1461
1462    default:
1463      LOG(FATAL) << "Unexpected type conversion from " << input_type
1464                 << " to " << result_type;
1465  }
1466}
1467
1468void LocationsBuilderARM::VisitAdd(HAdd* add) {
1469  LocationSummary* locations =
1470      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1471  switch (add->GetResultType()) {
1472    case Primitive::kPrimInt:
1473    case Primitive::kPrimLong: {
1474      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1475      locations->SetInAt(0, Location::RequiresRegister());
1476      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1477      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1478      break;
1479    }
1480
1481    case Primitive::kPrimFloat:
1482    case Primitive::kPrimDouble: {
1483      locations->SetInAt(0, Location::RequiresFpuRegister());
1484      locations->SetInAt(1, Location::RequiresFpuRegister());
1485      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1486      break;
1487    }
1488
1489    default:
1490      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1491  }
1492}
1493
1494void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1495  LocationSummary* locations = add->GetLocations();
1496  Location out = locations->Out();
1497  Location first = locations->InAt(0);
1498  Location second = locations->InAt(1);
1499  switch (add->GetResultType()) {
1500    case Primitive::kPrimInt:
1501      if (second.IsRegister()) {
1502        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1503      } else {
1504        __ AddConstant(out.As<Register>(),
1505                       first.As<Register>(),
1506                       second.GetConstant()->AsIntConstant()->GetValue());
1507      }
1508      break;
1509
1510    case Primitive::kPrimLong:
1511      __ adds(out.AsRegisterPairLow<Register>(),
1512              first.AsRegisterPairLow<Register>(),
1513              ShifterOperand(second.AsRegisterPairLow<Register>()));
1514      __ adc(out.AsRegisterPairHigh<Register>(),
1515             first.AsRegisterPairHigh<Register>(),
1516             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1517      break;
1518
1519    case Primitive::kPrimFloat:
1520      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1521      break;
1522
1523    case Primitive::kPrimDouble:
1524      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1525               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1526               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1527      break;
1528
1529    default:
1530      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1531  }
1532}
1533
1534void LocationsBuilderARM::VisitSub(HSub* sub) {
1535  LocationSummary* locations =
1536      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1537  switch (sub->GetResultType()) {
1538    case Primitive::kPrimInt:
1539    case Primitive::kPrimLong: {
1540      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1541      locations->SetInAt(0, Location::RequiresRegister());
1542      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1543      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1544      break;
1545    }
1546    case Primitive::kPrimFloat:
1547    case Primitive::kPrimDouble: {
1548      locations->SetInAt(0, Location::RequiresFpuRegister());
1549      locations->SetInAt(1, Location::RequiresFpuRegister());
1550      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1551      break;
1552    }
1553    default:
1554      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1555  }
1556}
1557
1558void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1559  LocationSummary* locations = sub->GetLocations();
1560  Location out = locations->Out();
1561  Location first = locations->InAt(0);
1562  Location second = locations->InAt(1);
1563  switch (sub->GetResultType()) {
1564    case Primitive::kPrimInt: {
1565      if (second.IsRegister()) {
1566        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1567      } else {
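        // A constant second operand is subtracted by adding its negation.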
1568        __ AddConstant(out.As<Register>(),
1569                       first.As<Register>(),
1570                       -second.GetConstant()->AsIntConstant()->GetValue());
1571      }
1572      break;
1573    }
1574
1575    case Primitive::kPrimLong: {
1576      __ subs(out.AsRegisterPairLow<Register>(),
1577              first.AsRegisterPairLow<Register>(),
1578              ShifterOperand(second.AsRegisterPairLow<Register>()));
1579      __ sbc(out.AsRegisterPairHigh<Register>(),
1580             first.AsRegisterPairHigh<Register>(),
1581             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1582      break;
1583    }
1584
1585    case Primitive::kPrimFloat: {
1586      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1587      break;
1588    }
1589
1590    case Primitive::kPrimDouble: {
1591      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1592               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1593               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1594      break;
1595    }
1596
1597
1598    default:
1599      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1600  }
1601}
1602
1603void LocationsBuilderARM::VisitMul(HMul* mul) {
1604  LocationSummary* locations =
1605      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1606  switch (mul->GetResultType()) {
1607    case Primitive::kPrimInt:
1608    case Primitive::kPrimLong:  {
1609      locations->SetInAt(0, Location::RequiresRegister());
1610      locations->SetInAt(1, Location::RequiresRegister());
1611      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1612      break;
1613    }
1614
1615    case Primitive::kPrimFloat:
1616    case Primitive::kPrimDouble: {
1617      locations->SetInAt(0, Location::RequiresFpuRegister());
1618      locations->SetInAt(1, Location::RequiresFpuRegister());
1619      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1620      break;
1621    }
1622
1623    default:
1624      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1625  }
1626}
1627
1628void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1629  LocationSummary* locations = mul->GetLocations();
1630  Location out = locations->Out();
1631  Location first = locations->InAt(0);
1632  Location second = locations->InAt(1);
1633  switch (mul->GetResultType()) {
1634    case Primitive::kPrimInt: {
1635      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
1636      break;
1637    }
1638    case Primitive::kPrimLong: {
1639      Register out_hi = out.AsRegisterPairHigh<Register>();
1640      Register out_lo = out.AsRegisterPairLow<Register>();
1641      Register in1_hi = first.AsRegisterPairHigh<Register>();
1642      Register in1_lo = first.AsRegisterPairLow<Register>();
1643      Register in2_hi = second.AsRegisterPairHigh<Register>();
1644      Register in2_lo = second.AsRegisterPairLow<Register>();
1645
1646      // Extra checks needed because overlapping register pairs such as R1_R2 exist.
1647      // The algorithm below is wrong if out.hi aliases either in1.lo or in2.lo
1648      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
1649      DCHECK_NE(out_hi, in1_lo);
1650      DCHECK_NE(out_hi, in2_lo);
1651
1652      // input: in1 - 64 bits, in2 - 64 bits
1653      // output: out
1654      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
1655      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1656      // parts: out.lo = (in1.lo * in2.lo)[31:0]
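      // Worked example: in1 = 0x00000001FFFFFFFF, in2 = 0x0000000000000002.
      // in1.lo * in2.lo = 0x1FFFFFFFE, so out.lo = 0xFFFFFFFE, and its high word
      // 0x1 is added to in1.lo * in2.hi + in1.hi * in2.lo = 0 + 2, giving
      // out.hi = 3, i.e. out = 0x00000003FFFFFFFE = in1 * in2.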
1657
1658      // IP <- in1.lo * in2.hi
1659      __ mul(IP, in1_lo, in2_hi);
1660      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
1661      __ mla(out_hi, in1_hi, in2_lo, IP);
1662      // out.lo <- (in1.lo * in2.lo)[31:0];
1663      __ umull(out_lo, IP, in1_lo, in2_lo);
1664      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1665      __ add(out_hi, out_hi, ShifterOperand(IP));
1666      break;
1667    }
1668
1669    case Primitive::kPrimFloat: {
1670      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1671      break;
1672    }
1673
1674    case Primitive::kPrimDouble: {
1675      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1676               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1677               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1678      break;
1679    }
1680
1681    default:
1682      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1683  }
1684}
1685
1686void LocationsBuilderARM::VisitDiv(HDiv* div) {
1687  LocationSummary* locations =
1688      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1689  switch (div->GetResultType()) {
1690    case Primitive::kPrimInt: {
1691      locations->SetInAt(0, Location::RequiresRegister());
1692      locations->SetInAt(1, Location::RequiresRegister());
1693      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1694      break;
1695    }
1696    case Primitive::kPrimLong: {
1697      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1698      break;
1699    }
1700    case Primitive::kPrimFloat:
1701    case Primitive::kPrimDouble: {
1702      locations->SetInAt(0, Location::RequiresFpuRegister());
1703      locations->SetInAt(1, Location::RequiresFpuRegister());
1704      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1705      break;
1706    }
1707
1708    default:
1709      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1710  }
1711}
1712
1713void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1714  LocationSummary* locations = div->GetLocations();
1715  Location out = locations->Out();
1716  Location first = locations->InAt(0);
1717  Location second = locations->InAt(1);
1718
1719  switch (div->GetResultType()) {
1720    case Primitive::kPrimInt: {
1721      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1722      break;
1723    }
1724
1725    case Primitive::kPrimLong: {
1726      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1727      break;
1728    }
1729
1730    case Primitive::kPrimFloat: {
1731      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1732      break;
1733    }
1734
1735    case Primitive::kPrimDouble: {
1736      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1737               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1738               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1739      break;
1740    }
1741
1742    default:
1743      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1744  }
1745}
1746
1747void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1748  LocationSummary* locations =
1749      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1750  locations->SetInAt(0, Location::RequiresRegister());
1751  if (instruction->HasUses()) {
1752    locations->SetOut(Location::SameAsFirstInput());
1753  }
1754}
1755
1756void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1757  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1758  codegen_->AddSlowPath(slow_path);
1759
1760  LocationSummary* locations = instruction->GetLocations();
1761  Location value = locations->InAt(0);
1762
1763  DCHECK(value.IsRegister()) << value;
1764  __ cmp(value.As<Register>(), ShifterOperand(0));
1765  __ b(slow_path->GetEntryLabel(), EQ);
1766}
1767
1768void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1769  LocationSummary* locations =
1770      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1771  InvokeRuntimeCallingConvention calling_convention;
1772  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1773  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1774  locations->SetOut(Location::RegisterLocation(R0));
1775}
1776
1777void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
1778  InvokeRuntimeCallingConvention calling_convention;
1779  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1780  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1781  codegen_->InvokeRuntime(
1782      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1783}
1784
1785void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1786  LocationSummary* locations =
1787      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1788  InvokeRuntimeCallingConvention calling_convention;
1789  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1790  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1791  locations->SetOut(Location::RegisterLocation(R0));
1792  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1793}
1794
1795void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1796  InvokeRuntimeCallingConvention calling_convention;
1797  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1798  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1799  codegen_->InvokeRuntime(
1800      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1801}
1802
1803void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1804  LocationSummary* locations =
1805      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1806  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1807  if (location.IsStackSlot()) {
1808    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1809  } else if (location.IsDoubleStackSlot()) {
1810    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1811  }
1812  locations->SetOut(location);
1813}
1814
1815void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
1816  // Nothing to do, the parameter is already at its location.
1817  UNUSED(instruction);
1818}
1819
1820void LocationsBuilderARM::VisitNot(HNot* not_) {
1821  LocationSummary* locations =
1822      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1823  locations->SetInAt(0, Location::RequiresRegister());
1824  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1825}
1826
1827void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1828  LocationSummary* locations = not_->GetLocations();
1829  Location out = locations->Out();
1830  Location in = locations->InAt(0);
1831  switch (not_->InputAt(0)->GetType()) {
1832    case Primitive::kPrimBoolean:
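      // Booleans are materialized as 0 or 1, so flipping the low bit with EOR
      // implements logical negation.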
1833      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1834      break;
1835
1836    case Primitive::kPrimInt:
1837      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1838      break;
1839
1840    case Primitive::kPrimLong:
1841      __ mvn(out.AsRegisterPairLow<Register>(),
1842             ShifterOperand(in.AsRegisterPairLow<Register>()));
1843      __ mvn(out.AsRegisterPairHigh<Register>(),
1844             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1845      break;
1846
1847    default:
1848      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1849  }
1850}
1851
1852void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1853  LocationSummary* locations =
1854      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1855  locations->SetInAt(0, Location::RequiresRegister());
1856  locations->SetInAt(1, Location::RequiresRegister());
1857  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1858}
1859
1860void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1861  LocationSummary* locations = compare->GetLocations();
1862  switch (compare->InputAt(0)->GetType()) {
1863    case Primitive::kPrimLong: {
1864      Register output = locations->Out().As<Register>();
1865      Location left = locations->InAt(0);
1866      Location right = locations->InAt(1);
1867      Label less, greater, done;
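      // The high words are compared first (signed); only when they are equal do
      // the low words decide the result, and those are compared as unsigned.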
1868      __ cmp(left.AsRegisterPairHigh<Register>(),
1869             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1870      __ b(&less, LT);
1871      __ b(&greater, GT);
1872      // Do the LoadImmediate before the low-word `cmp` below, as LoadImmediate
1873      // might clobber the status flags that the following branches depend on.
1874      __ LoadImmediate(output, 0);
1875      __ cmp(left.AsRegisterPairLow<Register>(),
1876             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
1877      __ b(&done, EQ);
1878      __ b(&less, CC);
1879
1880      __ Bind(&greater);
1881      __ LoadImmediate(output, 1);
1882      __ b(&done);
1883
1884      __ Bind(&less);
1885      __ LoadImmediate(output, -1);
1886
1887      __ Bind(&done);
1888      break;
1889    }
1890    default:
1891      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1892  }
1893}
1894
1895void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1896  LocationSummary* locations =
1897      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1898  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1899    locations->SetInAt(i, Location::Any());
1900  }
1901  locations->SetOut(Location::Any());
1902}
1903
1904void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
1905  UNUSED(instruction);
1906  LOG(FATAL) << "Unreachable";
1907}
1908
1909void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1910  LocationSummary* locations =
1911      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1912  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
1913  locations->SetInAt(0, Location::RequiresRegister());
1914  locations->SetInAt(1, Location::RequiresRegister());
1915  // Temporary registers for the write barrier.
1916  if (is_object_type) {
1917    locations->AddTemp(Location::RequiresRegister());
1918    locations->AddTemp(Location::RequiresRegister());
1919  }
1920}
1921
1922void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1923  LocationSummary* locations = instruction->GetLocations();
1924  Register obj = locations->InAt(0).As<Register>();
1925  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1926  Primitive::Type field_type = instruction->GetFieldType();
1927
1928  switch (field_type) {
1929    case Primitive::kPrimBoolean:
1930    case Primitive::kPrimByte: {
1931      Register value = locations->InAt(1).As<Register>();
1932      __ StoreToOffset(kStoreByte, value, obj, offset);
1933      break;
1934    }
1935
1936    case Primitive::kPrimShort:
1937    case Primitive::kPrimChar: {
1938      Register value = locations->InAt(1).As<Register>();
1939      __ StoreToOffset(kStoreHalfword, value, obj, offset);
1940      break;
1941    }
1942
1943    case Primitive::kPrimInt:
1944    case Primitive::kPrimNot: {
1945      Register value = locations->InAt(1).As<Register>();
1946      __ StoreToOffset(kStoreWord, value, obj, offset);
1947      if (field_type == Primitive::kPrimNot) {
1948        Register temp = locations->GetTemp(0).As<Register>();
1949        Register card = locations->GetTemp(1).As<Register>();
1950        codegen_->MarkGCCard(temp, card, obj, value);
1951      }
1952      break;
1953    }
1954
1955    case Primitive::kPrimLong: {
1956      Location value = locations->InAt(1);
1957      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
1958      break;
1959    }
1960
1961    case Primitive::kPrimFloat: {
1962      SRegister value = locations->InAt(1).As<SRegister>();
1963      __ StoreSToOffset(value, obj, offset);
1964      break;
1965    }
1966
1967    case Primitive::kPrimDouble: {
1968      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
1969      __ StoreDToOffset(value, obj, offset);
1970      break;
1971    }
1972
1973    case Primitive::kPrimVoid:
1974      LOG(FATAL) << "Unreachable type " << field_type;
1975      UNREACHABLE();
1976  }
1977}
1978
1979void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1980  LocationSummary* locations =
1981      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1982  locations->SetInAt(0, Location::RequiresRegister());
1983  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1984}
1985
1986void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1987  LocationSummary* locations = instruction->GetLocations();
1988  Register obj = locations->InAt(0).As<Register>();
1989  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1990
1991  switch (instruction->GetType()) {
1992    case Primitive::kPrimBoolean: {
1993      Register out = locations->Out().As<Register>();
1994      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
1995      break;
1996    }
1997
1998    case Primitive::kPrimByte: {
1999      Register out = locations->Out().As<Register>();
2000      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2001      break;
2002    }
2003
2004    case Primitive::kPrimShort: {
2005      Register out = locations->Out().As<Register>();
2006      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2007      break;
2008    }
2009
2010    case Primitive::kPrimChar: {
2011      Register out = locations->Out().As<Register>();
2012      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2013      break;
2014    }
2015
2016    case Primitive::kPrimInt:
2017    case Primitive::kPrimNot: {
2018      Register out = locations->Out().As<Register>();
2019      __ LoadFromOffset(kLoadWord, out, obj, offset);
2020      break;
2021    }
2022
2023    case Primitive::kPrimLong: {
2024      // TODO: support volatile.
2025      Location out = locations->Out();
2026      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2027      break;
2028    }
2029
2030    case Primitive::kPrimFloat: {
2031      SRegister out = locations->Out().As<SRegister>();
2032      __ LoadSFromOffset(out, obj, offset);
2033      break;
2034    }
2035
2036    case Primitive::kPrimDouble: {
2037      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2038      __ LoadDFromOffset(out, obj, offset);
2039      break;
2040    }
2041
2042    case Primitive::kPrimVoid:
2043      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2044      UNREACHABLE();
2045  }
2046}
2047
2048void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2049  LocationSummary* locations =
2050      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2051  locations->SetInAt(0, Location::RequiresRegister());
2052  if (instruction->HasUses()) {
2053    locations->SetOut(Location::SameAsFirstInput());
2054  }
2055}
2056
2057void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2058  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2059  codegen_->AddSlowPath(slow_path);
2060
2061  LocationSummary* locations = instruction->GetLocations();
2062  Location obj = locations->InAt(0);
2063
2064  if (obj.IsRegister()) {
2065    __ cmp(obj.As<Register>(), ShifterOperand(0));
2066    __ b(slow_path->GetEntryLabel(), EQ);
2067  } else {
2068    DCHECK(obj.IsConstant()) << obj;
2069    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2070    __ b(slow_path->GetEntryLabel());
2071  }
2072}
2073
2074void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2075  LocationSummary* locations =
2076      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2077  locations->SetInAt(0, Location::RequiresRegister());
2078  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2079  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2080}
2081
2082void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2083  LocationSummary* locations = instruction->GetLocations();
2084  Register obj = locations->InAt(0).As<Register>();
2085  Location index = locations->InAt(1);
2086
2087  switch (instruction->GetType()) {
2088    case Primitive::kPrimBoolean: {
2089      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2090      Register out = locations->Out().As<Register>();
2091      if (index.IsConstant()) {
2092        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2093        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2094      } else {
2095        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2096        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2097      }
2098      break;
2099    }
2100
2101    case Primitive::kPrimByte: {
2102      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2103      Register out = locations->Out().As<Register>();
2104      if (index.IsConstant()) {
2105        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2106        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2107      } else {
2108        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2109        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2110      }
2111      break;
2112    }
2113
2114    case Primitive::kPrimShort: {
2115      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2116      Register out = locations->Out().As<Register>();
2117      if (index.IsConstant()) {
2118        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2119        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2120      } else {
2121        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2122        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2123      }
2124      break;
2125    }
2126
2127    case Primitive::kPrimChar: {
2128      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2129      Register out = locations->Out().As<Register>();
2130      if (index.IsConstant()) {
2131        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2132        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2133      } else {
2134        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2135        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2136      }
2137      break;
2138    }
2139
2140    case Primitive::kPrimInt:
2141    case Primitive::kPrimNot: {
2142      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2143      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2144      Register out = locations->Out().As<Register>();
2145      if (index.IsConstant()) {
2146        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2147        __ LoadFromOffset(kLoadWord, out, obj, offset);
2148      } else {
2149        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2150        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2151      }
2152      break;
2153    }
2154
2155    case Primitive::kPrimLong: {
2156      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2157      Location out = locations->Out();
2158      if (index.IsConstant()) {
2159        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2160        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2161      } else {
2162        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2163        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2164      }
2165      break;
2166    }
2167
2168    case Primitive::kPrimFloat:
2169    case Primitive::kPrimDouble:
2170      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2171      UNREACHABLE();
2172    case Primitive::kPrimVoid:
2173      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2174      UNREACHABLE();
2175  }
2176}
2177
2178void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2179  Primitive::Type value_type = instruction->GetComponentType();
2180  bool is_object = value_type == Primitive::kPrimNot;
2181  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2182      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
2183  if (is_object) {
2184    InvokeRuntimeCallingConvention calling_convention;
2185    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2186    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2187    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2188  } else {
2189    locations->SetInAt(0, Location::RequiresRegister());
2190    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2191    locations->SetInAt(2, Location::RequiresRegister());
2192  }
2193}
2194
2195void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2196  LocationSummary* locations = instruction->GetLocations();
2197  Register obj = locations->InAt(0).As<Register>();
2198  Location index = locations->InAt(1);
2199  Primitive::Type value_type = instruction->GetComponentType();
2200
2201  switch (value_type) {
2202    case Primitive::kPrimBoolean:
2203    case Primitive::kPrimByte: {
2204      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2205      Register value = locations->InAt(2).As<Register>();
2206      if (index.IsConstant()) {
2207        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2208        __ StoreToOffset(kStoreByte, value, obj, offset);
2209      } else {
2210        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2211        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2212      }
2213      break;
2214    }
2215
2216    case Primitive::kPrimShort:
2217    case Primitive::kPrimChar: {
2218      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2219      Register value = locations->InAt(2).As<Register>();
2220      if (index.IsConstant()) {
2221        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2222        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2223      } else {
2224        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2225        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2226      }
2227      break;
2228    }
2229
2230    case Primitive::kPrimInt: {
2231      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2232      Register value = locations->InAt(2).As<Register>();
2233      if (index.IsConstant()) {
2234        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2235        __ StoreToOffset(kStoreWord, value, obj, offset);
2236      } else {
2237        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2238        __ StoreToOffset(kStoreWord, value, IP, data_offset);
2239      }
2240      break;
2241    }
2242
2243    case Primitive::kPrimNot: {
2244      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2245      break;
2246    }
2247
2248    case Primitive::kPrimLong: {
2249      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2250      Location value = locations->InAt(2);
2251      if (index.IsConstant()) {
2252        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2253        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2254      } else {
2255        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2256        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2257      }
2258      break;
2259    }
2260
2261    case Primitive::kPrimFloat:
2262    case Primitive::kPrimDouble:
2263      LOG(FATAL) << "Unimplemented register type " << value_type;
2264      UNREACHABLE();
2265    case Primitive::kPrimVoid:
2266      LOG(FATAL) << "Unreachable type " << value_type;
2267      UNREACHABLE();
2268  }
2269}
2270
2271void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2272  LocationSummary* locations =
2273      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2274  locations->SetInAt(0, Location::RequiresRegister());
2275  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2276}
2277
2278void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2279  LocationSummary* locations = instruction->GetLocations();
2280  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2281  Register obj = locations->InAt(0).As<Register>();
2282  Register out = locations->Out().As<Register>();
2283  __ LoadFromOffset(kLoadWord, out, obj, offset);
2284}
2285
2286void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2287  LocationSummary* locations =
2288      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2289  locations->SetInAt(0, Location::RequiresRegister());
2290  locations->SetInAt(1, Location::RequiresRegister());
2291  if (instruction->HasUses()) {
2292    locations->SetOut(Location::SameAsFirstInput());
2293  }
2294}
2295
2296void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2297  LocationSummary* locations = instruction->GetLocations();
2298  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2299      instruction, locations->InAt(0), locations->InAt(1));
2300  codegen_->AddSlowPath(slow_path);
2301
2302  Register index = locations->InAt(0).As<Register>();
2303  Register length = locations->InAt(1).As<Register>();
2304
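  // A single unsigned comparison covers both bounds: a negative index wraps to a
  // large unsigned value and also takes the CS (unsigned higher-or-same) branch.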
2305  __ cmp(index, ShifterOperand(length));
2306  __ b(slow_path->GetEntryLabel(), CS);
2307}
2308
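// Write barrier helper: when `value` is non-null, dirty the card covering `object`.
// The card address is card table base + (object >> kCardShift), and the byte stored
// is the low byte of the card table base held in `card`.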
2309void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
2310  Label is_null;
2311  __ CompareAndBranchIfZero(value, &is_null);
2312  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
2313  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
2314  __ strb(card, Address(card, temp));
2315  __ Bind(&is_null);
2316}
2317
2318void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
2319  temp->SetLocations(nullptr);
2320}
2321
2322void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
2323  // Nothing to do, this is driven by the code generator.
2324  UNUSED(temp);
2325}
2326
2327void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
2328  UNUSED(instruction);
2329  LOG(FATAL) << "Unreachable";
2330}
2331
2332void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
2333  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2334}
2335
2336void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2337  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2338}
2339
2340void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2341  HBasicBlock* block = instruction->GetBlock();
2342  if (block->GetLoopInformation() != nullptr) {
2343    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2344    // The back edge will generate the suspend check.
2345    return;
2346  }
2347  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2348    // The goto will generate the suspend check.
2349    return;
2350  }
2351  GenerateSuspendCheck(instruction, nullptr);
2352}
2353
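// Polls the thread flags half-word of the current thread: any non-zero value means a
// suspend or checkpoint request is pending. Without an explicit successor, execution
// falls through once the slow path returns; with one, the code branches straight to
// the successor when no request is pending and to the slow path otherwise.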
2354void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
2355                                                       HBasicBlock* successor) {
2356  SuspendCheckSlowPathARM* slow_path =
2357      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
2358  codegen_->AddSlowPath(slow_path);
2359
2360  __ LoadFromOffset(
2361      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
2362  __ cmp(IP, ShifterOperand(0));
2363  // TODO: Figure out the branch offsets and use cbz/cbnz.
2364  if (successor == nullptr) {
2365    __ b(slow_path->GetEntryLabel(), NE);
2366    __ Bind(slow_path->GetReturnLabel());
2367  } else {
2368    __ b(codegen_->GetLabelOf(successor), EQ);
2369    __ b(slow_path->GetEntryLabel());
2370  }
2371}
2372
2373ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
2374  return codegen_->GetAssembler();
2375}
2376
2377void ParallelMoveResolverARM::EmitMove(size_t index) {
2378  MoveOperands* move = moves_.Get(index);
2379  Location source = move->GetSource();
2380  Location destination = move->GetDestination();
2381
2382  if (source.IsRegister()) {
2383    if (destination.IsRegister()) {
2384      __ Mov(destination.As<Register>(), source.As<Register>());
2385    } else {
2386      DCHECK(destination.IsStackSlot());
2387      __ StoreToOffset(kStoreWord, source.As<Register>(),
2388                       SP, destination.GetStackIndex());
2389    }
2390  } else if (source.IsStackSlot()) {
2391    if (destination.IsRegister()) {
2392      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
2393                        SP, source.GetStackIndex());
2394    } else {
2395      DCHECK(destination.IsStackSlot());
2396      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2397      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2398    }
2399  } else {
2400    DCHECK(source.IsConstant());
2401    DCHECK(source.GetConstant()->IsIntConstant());
2402    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
2403    if (destination.IsRegister()) {
2404      __ LoadImmediate(destination.As<Register>(), value);
2405    } else {
2406      DCHECK(destination.IsStackSlot());
2407      __ LoadImmediate(IP, value);
2408      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2409    }
2410  }
2411}
2412
2413void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
2414  __ Mov(IP, reg);
2415  __ LoadFromOffset(kLoadWord, reg, SP, mem);
2416  __ StoreToOffset(kStoreWord, IP, SP, mem);
2417}
2418
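// Swaps two stack slots. If the scratch register scope had to spill a register, the
// push moved SP down by one word, so both offsets are rebased by kArmWordSize.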
2419void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
2420  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
2421  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
2422  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
2423                    SP, mem1 + stack_offset);
2424  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
2425  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
2426                   SP, mem2 + stack_offset);
2427  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
2428}
2429
2430void ParallelMoveResolverARM::EmitSwap(size_t index) {
2431  MoveOperands* move = moves_.Get(index);
2432  Location source = move->GetSource();
2433  Location destination = move->GetDestination();
2434
2435  if (source.IsRegister() && destination.IsRegister()) {
2436    DCHECK_NE(source.As<Register>(), IP);
2437    DCHECK_NE(destination.As<Register>(), IP);
2438    __ Mov(IP, source.As<Register>());
2439    __ Mov(source.As<Register>(), destination.As<Register>());
2440    __ Mov(destination.As<Register>(), IP);
2441  } else if (source.IsRegister() && destination.IsStackSlot()) {
2442    Exchange(source.As<Register>(), destination.GetStackIndex());
2443  } else if (source.IsStackSlot() && destination.IsRegister()) {
2444    Exchange(destination.As<Register>(), source.GetStackIndex());
2445  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2446    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2447  } else {
2448    LOG(FATAL) << "Unimplemented";
2449  }
2450}
2451
2452void ParallelMoveResolverARM::SpillScratch(int reg) {
2453  __ Push(static_cast<Register>(reg));
2454}
2455
2456void ParallelMoveResolverARM::RestoreScratch(int reg) {
2457  __ Pop(static_cast<Register>(reg));
2458}
2459
2460void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2461  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2462      ? LocationSummary::kCallOnSlowPath
2463      : LocationSummary::kNoCall;
2464  LocationSummary* locations =
2465      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2466  locations->SetOut(Location::RequiresRegister());
2467}
2468
2469void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
2470  Register out = cls->GetLocations()->Out().As<Register>();
2471  if (cls->IsReferrersClass()) {
2472    DCHECK(!cls->CanCallRuntime());
2473    DCHECK(!cls->MustGenerateClinitCheck());
2474    codegen_->LoadCurrentMethod(out);
2475    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
2476  } else {
2477    DCHECK(cls->CanCallRuntime());
2478    codegen_->LoadCurrentMethod(out);
2479    __ LoadFromOffset(
2480        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
2481    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
2482
2483    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2484        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2485    codegen_->AddSlowPath(slow_path);
2486    __ cmp(out, ShifterOperand(0));
2487    __ b(slow_path->GetEntryLabel(), EQ);
2488    if (cls->MustGenerateClinitCheck()) {
2489      GenerateClassInitializationCheck(slow_path, out);
2490    } else {
2491      __ Bind(slow_path->GetExitLabel());
2492    }
2493  }
2494}
2495
2496void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2497  LocationSummary* locations =
2498      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2499  locations->SetInAt(0, Location::RequiresRegister());
2500  if (check->HasUses()) {
2501    locations->SetOut(Location::SameAsFirstInput());
2502  }
2503}
2504
2505void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2506  // We assume the class is not null.
2507  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2508      check->GetLoadClass(), check, check->GetDexPc(), true);
2509  codegen_->AddSlowPath(slow_path);
2510  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2511}
2512
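// Branches to the slow path while the class status is still below kStatusInitialized.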
2513void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
2514    SlowPathCodeARM* slow_path, Register class_reg) {
2515  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
2516  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
2517  __ b(slow_path->GetEntryLabel(), LT);
2518  // Even if the initialized flag is set, we may be in a situation where caches are not synced
2519  // properly. Therefore, we do a memory fence.
2520  __ dmb(ISH);
2521  __ Bind(slow_path->GetExitLabel());
2522}
2523
2524void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2525  LocationSummary* locations =
2526      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2527  locations->SetInAt(0, Location::RequiresRegister());
2528  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2529}
2530
2531void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2532  LocationSummary* locations = instruction->GetLocations();
2533  Register cls = locations->InAt(0).As<Register>();
2534  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2535
2536  switch (instruction->GetType()) {
2537    case Primitive::kPrimBoolean: {
2538      Register out = locations->Out().As<Register>();
2539      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2540      break;
2541    }
2542
2543    case Primitive::kPrimByte: {
2544      Register out = locations->Out().As<Register>();
2545      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2546      break;
2547    }
2548
2549    case Primitive::kPrimShort: {
2550      Register out = locations->Out().As<Register>();
2551      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2552      break;
2553    }
2554
2555    case Primitive::kPrimChar: {
2556      Register out = locations->Out().As<Register>();
2557      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2558      break;
2559    }
2560
2561    case Primitive::kPrimInt:
2562    case Primitive::kPrimNot: {
2563      Register out = locations->Out().As<Register>();
2564      __ LoadFromOffset(kLoadWord, out, cls, offset);
2565      break;
2566    }
2567
2568    case Primitive::kPrimLong: {
2569      // TODO: support volatile.
2570      Location out = locations->Out();
2571      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2572      break;
2573    }
2574
2575    case Primitive::kPrimFloat: {
2576      SRegister out = locations->Out().As<SRegister>();
2577      __ LoadSFromOffset(out, cls, offset);
2578      break;
2579    }
2580
2581    case Primitive::kPrimDouble: {
2582      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2583      __ LoadDFromOffset(out, cls, offset);
2584      break;
2585    }
2586
2587    case Primitive::kPrimVoid:
2588      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2589      UNREACHABLE();
2590  }
2591}
2592
2593void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2594  LocationSummary* locations =
2595      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2596  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2597  locations->SetInAt(0, Location::RequiresRegister());
2598  locations->SetInAt(1, Location::RequiresRegister());
2599  // Temporary registers for the write barrier.
2600  if (is_object_type) {
2601    locations->AddTemp(Location::RequiresRegister());
2602    locations->AddTemp(Location::RequiresRegister());
2603  }
2604}
2605
2606void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2607  LocationSummary* locations = instruction->GetLocations();
2608  Register cls = locations->InAt(0).As<Register>();
2609  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2610  Primitive::Type field_type = instruction->GetFieldType();
2611
2612  switch (field_type) {
2613    case Primitive::kPrimBoolean:
2614    case Primitive::kPrimByte: {
2615      Register value = locations->InAt(1).As<Register>();
2616      __ StoreToOffset(kStoreByte, value, cls, offset);
2617      break;
2618    }
2619
2620    case Primitive::kPrimShort:
2621    case Primitive::kPrimChar: {
2622      Register value = locations->InAt(1).As<Register>();
2623      __ StoreToOffset(kStoreHalfword, value, cls, offset);
2624      break;
2625    }
2626
2627    case Primitive::kPrimInt:
2628    case Primitive::kPrimNot: {
2629      Register value = locations->InAt(1).As<Register>();
2630      __ StoreToOffset(kStoreWord, value, cls, offset);
2631      if (field_type == Primitive::kPrimNot) {
2632        Register temp = locations->GetTemp(0).As<Register>();
2633        Register card = locations->GetTemp(1).As<Register>();
2634        codegen_->MarkGCCard(temp, card, cls, value);
2635      }
2636      break;
2637    }
2638
2639    case Primitive::kPrimLong: {
2640      Location value = locations->InAt(1);
2641      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
2642      break;
2643    }
2644
2645    case Primitive::kPrimFloat: {
2646      SRegister value = locations->InAt(1).As<SRegister>();
2647      __ StoreSToOffset(value, cls, offset);
2648      break;
2649    }
2650
2651    case Primitive::kPrimDouble: {
2652      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
2653      __ StoreDToOffset(value, cls, offset);
2654      break;
2655    }
2656
2657    case Primitive::kPrimVoid:
2658      LOG(FATAL) << "Unreachable type " << field_type;
2659      UNREACHABLE();
2660  }
2661}
2662
2663void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2664  LocationSummary* locations =
2665      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2666  locations->SetOut(Location::RequiresRegister());
2667}
2668
2669void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
2670  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
2671  codegen_->AddSlowPath(slow_path);
2672
2673  Register out = load->GetLocations()->Out().As<Register>();
2674  codegen_->LoadCurrentMethod(out);
2675  __ LoadFromOffset(
2676      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
2677  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
2678  __ cmp(out, ShifterOperand(0));
2679  __ b(slow_path->GetEntryLabel(), EQ);
2680  __ Bind(slow_path->GetExitLabel());
2681}
2682
2683void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
2684  LocationSummary* locations =
2685      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2686  locations->SetOut(Location::RequiresRegister());
2687}
2688
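// Loads the pending exception reference from the current thread, then clears the
// field by storing null back.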
2689void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
2690  Register out = load->GetLocations()->Out().As<Register>();
2691  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
2692  __ LoadFromOffset(kLoadWord, out, TR, offset);
2693  __ LoadImmediate(IP, 0);
2694  __ StoreToOffset(kStoreWord, IP, TR, offset);
2695}
2696
2697void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
2698  LocationSummary* locations =
2699      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2700  InvokeRuntimeCallingConvention calling_convention;
2701  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2702}
2703
2704void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
2705  codegen_->InvokeRuntime(
2706      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
2707}
2708
2709void LocationsBuilderARM::VisitTypeCheck(HTypeCheck* instruction) {
2710  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
2711      ? LocationSummary::kNoCall
2712      : LocationSummary::kCallOnSlowPath;
2713  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2714  locations->SetInAt(0, Location::RequiresRegister());
2715  locations->SetInAt(1, Location::RequiresRegister());
2716  locations->SetOut(Location::RequiresRegister());
2717}
2718
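// Lowers `instanceof`: a null object yields 0; otherwise the object's class is
// compared against `cls`. For final classes an equality check is enough; in the
// general case a mismatch falls back to the type-check slow path.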
2719void InstructionCodeGeneratorARM::VisitTypeCheck(HTypeCheck* instruction) {
2720  LocationSummary* locations = instruction->GetLocations();
2721  Register obj = locations->InAt(0).As<Register>();
2722  Register cls = locations->InAt(1).As<Register>();
2723  Register out = locations->Out().As<Register>();
2724  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2725  Label done, zero;
2726  SlowPathCodeARM* slow_path = nullptr;
2727
2728  // Return 0 if `obj` is null.
2729  // TODO: avoid this check if we know obj is not null.
2730  __ cmp(obj, ShifterOperand(0));
2731  __ b(&zero, EQ);
2732  // Compare the class of `obj` with `cls`.
2733  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
2734  __ cmp(out, ShifterOperand(cls));
2735  if (instruction->IsClassFinal()) {
2736    // Classes must be equal for the instanceof to succeed.
2737    __ b(&zero, NE);
2738    __ LoadImmediate(out, 1);
2739    __ b(&done);
2740  } else {
2741    // If the classes are not equal, we go into a slow path.
2742    DCHECK(locations->OnlyCallsOnSlowPath());
2743    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
2744        instruction, Location::RegisterLocation(out));
2745    codegen_->AddSlowPath(slow_path);
2746    __ b(slow_path->GetEntryLabel(), NE);
2747    __ LoadImmediate(out, 1);
2748    __ b(&done);
2749  }
2750  __ Bind(&zero);
2751  __ LoadImmediate(out, 0);
2752  if (slow_path != nullptr) {
2753    __ Bind(slow_path->GetExitLabel());
2754  }
2755  __ Bind(&done);
2756}
2757
2758}  // namespace arm
2759}  // namespace art
2760