code_generator_arm.cc revision 425f239c291d435f519a1cf4bdd9ccc9a2c0c070
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {

namespace arm {

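// On ARM VFP, D<n> overlaps the S register pair S<2n>/S<2n+1>, so the D register
// backing an even-numbered S register is simply the S register number divided by two.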
static DRegister FromLowSToD(SRegister reg) {
  DCHECK_EQ(reg % 2, 0);
  return static_cast<DRegister>(reg / 2);
}

static constexpr bool kExplicitStackOverflowCheck = false;

static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
static constexpr int kCurrentMethodStackOffset = 0;

static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

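// Convenience macros: `__` forwards to the ARM assembler owned by the code
// generator, and QUICK_ENTRY_POINT yields the Thread-relative offset of a
// quick runtime entry point for the ARM word size.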
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()

class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};

class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};

class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};

class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};

class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};

class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};

class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};

class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};

class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};

#undef __

#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->

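// Map an HIR IfCondition to the equivalent ARM condition code; the Opposite
// variant below returns the code for the logical negation of the condition.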
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}

inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}

void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}

void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}

size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}

size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}

CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures* isa_features)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {}

size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}

Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}

void CodeGeneratorARM::SetupBlockedRegisters() const {
  // Don't allocate the Dalvik-style register pair used for argument passing (R1_R2).
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  // We always save and restore R6 and R7 to make sure we can use three
  // register pairs for long operations.
  blocked_core_registers_[R4] = true;
  blocked_core_registers_[R5] = true;
  blocked_core_registers_[R8] = true;
  blocked_core_registers_[R10] = true;
  blocked_core_registers_[R11] = true;

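  // Block the callee-saved VFP registers (S16-S31, i.e. D8-D15 in the AAPCS);
  // this code generator does not save or restore them in the frame entry/exit.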
  blocked_fpu_registers_[S16] = true;
  blocked_fpu_registers_[S17] = true;
  blocked_fpu_registers_[S18] = true;
  blocked_fpu_registers_[S19] = true;
  blocked_fpu_registers_[S20] = true;
  blocked_fpu_registers_[S21] = true;
  blocked_fpu_registers_[S22] = true;
  blocked_fpu_registers_[S23] = true;
  blocked_fpu_registers_[S24] = true;
  blocked_fpu_registers_[S25] = true;
  blocked_fpu_registers_[S26] = true;
  blocked_fpu_registers_[S27] = true;
  blocked_fpu_registers_[S28] = true;
  blocked_fpu_registers_[S29] = true;
  blocked_fpu_registers_[S30] = true;
  blocked_fpu_registers_[S31] = true;

  UpdateBlockedPairRegisters();
}

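// A register pair is blocked as soon as either of its halves is blocked.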
void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
    ArmManagedRegister current =
        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
    if (blocked_core_registers_[current.AsRegisterPairLow()]
        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
      blocked_register_pairs_[i] = true;
    }
  }
}

InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
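      // Implicit stack overflow check: load from an address below the stack
      // limit; if the guard page is hit, the runtime's fault handler turns the
      // resulting signal into a StackOverflowError.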
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
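  // Store the current ArtMethod*, which the quick ABI passes in R0, at the
  // bottom of the frame (kCurrentMethodStackOffset).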
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}

void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}

void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}

void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

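// Moves a 64-bit value between core register pairs, FPU register pairs and
// double stack slots, using the parallel move resolver for the cases where
// the two 32-bit halves could conflict.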
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      // No conflict possible, so just do the moves.
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}

void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move;
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}

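// Calls a quick runtime entry point through the Thread register and records
// the call PC so a stack map is emitted for the runtime call site.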
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}

void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}

void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}


void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
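  // Materialize the condition into the output register: the IT block makes the
  // following two moves conditional, writing 1 if the condition holds and 0
  // otherwise.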
985  __ it(ARMCondition(comp->GetCondition()), kItElse);
986  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
987         ARMCondition(comp->GetCondition()));
988  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
989         ARMOppositeCondition(comp->GetCondition()));
990}
991
992void LocationsBuilderARM::VisitEqual(HEqual* comp) {
993  VisitCondition(comp);
994}
995
996void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
997  VisitCondition(comp);
998}
999
1000void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1001  VisitCondition(comp);
1002}
1003
1004void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1005  VisitCondition(comp);
1006}
1007
1008void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1009  VisitCondition(comp);
1010}
1011
1012void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1013  VisitCondition(comp);
1014}
1015
1016void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1017  VisitCondition(comp);
1018}
1019
1020void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1021  VisitCondition(comp);
1022}
1023
1024void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1025  VisitCondition(comp);
1026}
1027
1028void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1029  VisitCondition(comp);
1030}
1031
1032void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1033  VisitCondition(comp);
1034}
1035
1036void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1037  VisitCondition(comp);
1038}
1039
1040void LocationsBuilderARM::VisitLocal(HLocal* local) {
1041  local->SetLocations(nullptr);
1042}
1043
1044void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1045  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1046}
1047
1048void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1049  load->SetLocations(nullptr);
1050}
1051
1052void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1053  // Nothing to do, this is driven by the code generator.
1054  UNUSED(load);
1055}
1056
1057void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1058  LocationSummary* locations =
1059      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1060  switch (store->InputAt(1)->GetType()) {
1061    case Primitive::kPrimBoolean:
1062    case Primitive::kPrimByte:
1063    case Primitive::kPrimChar:
1064    case Primitive::kPrimShort:
1065    case Primitive::kPrimInt:
1066    case Primitive::kPrimNot:
1067    case Primitive::kPrimFloat:
1068      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1069      break;
1070
1071    case Primitive::kPrimLong:
1072    case Primitive::kPrimDouble:
1073      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1074      break;
1075
1076    default:
1077      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1078  }
1079}
1080
1081void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1082  UNUSED(store);
1083}
1084
1085void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1086  LocationSummary* locations =
1087      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1088  locations->SetOut(Location::ConstantLocation(constant));
1089}
1090
1091void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1092  // Will be generated at use site.
1093  UNUSED(constant);
1094}
1095
1096void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1097  LocationSummary* locations =
1098      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1099  locations->SetOut(Location::ConstantLocation(constant));
1100}
1101
1102void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1103  // Will be generated at use site.
1104  UNUSED(constant);
1105}
1106
1107void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1108  LocationSummary* locations =
1109      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1110  locations->SetOut(Location::ConstantLocation(constant));
1111}
1112
1113void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1114  // Will be generated at use site.
1115  UNUSED(constant);
1116}
1117
1118void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1119  LocationSummary* locations =
1120      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1121  locations->SetOut(Location::ConstantLocation(constant));
1122}
1123
1124void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1125  // Will be generated at use site.
1126  UNUSED(constant);
1127}
1128
1129void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1130  ret->SetLocations(nullptr);
1131}
1132
1133void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1134  UNUSED(ret);
1135  codegen_->GenerateFrameExit();
1136}
1137
1138void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1139  LocationSummary* locations =
1140      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1141  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1142}
1143
1144void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1145  UNUSED(ret);
1146  codegen_->GenerateFrameExit();
1147}
1148
1149void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1150  HandleInvoke(invoke);
1151}
1152
1153void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1154  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1155}
1156
1157void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1158  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1159
1160  // TODO: Implement all kinds of calls:
1161  // 1) boot -> boot
1162  // 2) app -> boot
1163  // 3) app -> app
1164  //
1165  // Currently we implement the app -> app logic, which looks up in the resolve cache.
1166
1167  // temp = method;
1168  codegen_->LoadCurrentMethod(temp);
1169  // temp = temp->dex_cache_resolved_methods_;
1170  __ LoadFromOffset(
1171      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1172  // temp = temp[index_in_cache]
1173  __ LoadFromOffset(
1174      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
1175  // LR = temp[offset_of_quick_compiled_code]
1176  __ LoadFromOffset(kLoadWord, LR, temp,
1177                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1178                         kArmWordSize).Int32Value());
1179  // LR()
1180  __ blx(LR);
1181
1182  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1183  DCHECK(!codegen_->IsLeafMethod());
1184}
1185
1186void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1187  LocationSummary* locations =
1188      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1189  locations->AddTemp(Location::RegisterLocation(R0));
1190
1191  InvokeDexCallingConventionVisitor calling_convention_visitor;
1192  for (size_t i = 0; i < invoke->InputCount(); i++) {
1193    HInstruction* input = invoke->InputAt(i);
1194    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1195  }
1196
1197  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1198}
1199
1200void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1201  HandleInvoke(invoke);
1202}
1203
1204void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1205  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1206  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1207          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1208  LocationSummary* locations = invoke->GetLocations();
1209  Location receiver = locations->InAt(0);
1210  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1211  // temp = object->GetClass();
1212  if (receiver.IsStackSlot()) {
1213    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1214    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1215  } else {
1216    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1217  }
1218  // temp = temp->GetMethodAt(method_offset);
1219  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1220      kArmWordSize).Int32Value();
1221  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1222  // LR = temp->GetEntryPoint();
1223  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1224  // LR();
1225  __ blx(LR);
1226  DCHECK(!codegen_->IsLeafMethod());
1227  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1228}
1229
1230void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1231  HandleInvoke(invoke);
1232  // Add the hidden argument.
1233  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1234}
1235
1236void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1237  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1238  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1239  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1240          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1241  LocationSummary* locations = invoke->GetLocations();
1242  Location receiver = locations->InAt(0);
1243  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1244
1245  // Set the hidden argument.
1246  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
1247                   invoke->GetDexMethodIndex());
1248
1249  // temp = object->GetClass();
1250  if (receiver.IsStackSlot()) {
1251    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1252    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1253  } else {
1254    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1255  }
1256  // temp = temp->GetImtEntryAt(method_offset);
1257  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1258      kArmWordSize).Int32Value();
1259  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1260  // LR = temp->GetEntryPoint();
1261  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1262  // LR();
1263  __ blx(LR);
1264  DCHECK(!codegen_->IsLeafMethod());
1265  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1266}
1267
1268void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1269  LocationSummary* locations =
1270      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1271  switch (neg->GetResultType()) {
1272    case Primitive::kPrimInt:
1273    case Primitive::kPrimLong: {
1274      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1275      locations->SetInAt(0, Location::RequiresRegister());
1276      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1277      break;
1278    }
1279
1280    case Primitive::kPrimFloat:
1281    case Primitive::kPrimDouble:
1282      locations->SetInAt(0, Location::RequiresFpuRegister());
1283      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1284      break;
1285
1286    default:
1287      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1288  }
1289}
1290
1291void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1292  LocationSummary* locations = neg->GetLocations();
1293  Location out = locations->Out();
1294  Location in = locations->InAt(0);
1295  switch (neg->GetResultType()) {
1296    case Primitive::kPrimInt:
1297      DCHECK(in.IsRegister());
1298      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
1299      break;
1300
1301    case Primitive::kPrimLong:
1302      DCHECK(in.IsRegisterPair());
1303      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1304      __ rsbs(out.AsRegisterPairLow<Register>(),
1305              in.AsRegisterPairLow<Register>(),
1306              ShifterOperand(0));
1307      // We cannot emit an RSC (Reverse Subtract with Carry)
1308      // instruction here, as it does not exist in the Thumb-2
1309      // instruction set.  We use the following approach
1310      // using SBC and SUB instead.
1311      //
1312      // out.hi = -C
1313      __ sbc(out.AsRegisterPairHigh<Register>(),
1314             out.AsRegisterPairHigh<Register>(),
1315             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1316      // out.hi = out.hi - in.hi
1317      __ sub(out.AsRegisterPairHigh<Register>(),
1318             out.AsRegisterPairHigh<Register>(),
1319             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1320      break;
1321
1322    case Primitive::kPrimFloat:
1323      DCHECK(in.IsFpuRegister());
1324      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
1325      break;
1326
1327    case Primitive::kPrimDouble:
1328      DCHECK(in.IsFpuRegisterPair());
1329      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1330               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1331      break;
1332
1333    default:
1334      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1335  }
1336}
1337
1338void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1339  Primitive::Type result_type = conversion->GetResultType();
1340  Primitive::Type input_type = conversion->GetInputType();
1341  DCHECK_NE(result_type, input_type);
1342
1343  // The float-to-long and double-to-long type conversions rely on a
1344  // call to the runtime.
1345  LocationSummary::CallKind call_kind =
1346      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1347       && result_type == Primitive::kPrimLong)
1348      ? LocationSummary::kCall
1349      : LocationSummary::kNoCall;
1350  LocationSummary* locations =
1351      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1352
1353  switch (result_type) {
1354    case Primitive::kPrimByte:
1355      switch (input_type) {
1356        case Primitive::kPrimShort:
1357        case Primitive::kPrimInt:
1358        case Primitive::kPrimChar:
1359          // Processing a Dex `int-to-byte' instruction.
1360          locations->SetInAt(0, Location::RequiresRegister());
1361          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1362          break;
1363
1364        default:
1365          LOG(FATAL) << "Unexpected type conversion from " << input_type
1366                     << " to " << result_type;
1367      }
1368      break;
1369
1370    case Primitive::kPrimShort:
1371      switch (input_type) {
1372        case Primitive::kPrimByte:
1373        case Primitive::kPrimInt:
1374        case Primitive::kPrimChar:
1375          // Processing a Dex `int-to-short' instruction.
1376          locations->SetInAt(0, Location::RequiresRegister());
1377          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1378          break;
1379
1380        default:
1381          LOG(FATAL) << "Unexpected type conversion from " << input_type
1382                     << " to " << result_type;
1383      }
1384      break;
1385
1386    case Primitive::kPrimInt:
1387      switch (input_type) {
1388        case Primitive::kPrimLong:
1389          // Processing a Dex `long-to-int' instruction.
1390          locations->SetInAt(0, Location::Any());
1391          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1392          break;
1393
1394        case Primitive::kPrimFloat:
1395          // Processing a Dex `float-to-int' instruction.
1396          locations->SetInAt(0, Location::RequiresFpuRegister());
1397          locations->SetOut(Location::RequiresRegister());
1398          locations->AddTemp(Location::RequiresFpuRegister());
1399          break;
1400
1401        case Primitive::kPrimDouble:
1402          // Processing a Dex `double-to-int' instruction.
1403          locations->SetInAt(0, Location::RequiresFpuRegister());
1404          locations->SetOut(Location::RequiresRegister());
1405          locations->AddTemp(Location::RequiresFpuRegister());
1406          break;
1407
1408        default:
1409          LOG(FATAL) << "Unexpected type conversion from " << input_type
1410                     << " to " << result_type;
1411      }
1412      break;
1413
1414    case Primitive::kPrimLong:
1415      switch (input_type) {
1416        case Primitive::kPrimByte:
1417        case Primitive::kPrimShort:
1418        case Primitive::kPrimInt:
1419        case Primitive::kPrimChar:
1420          // Processing a Dex `int-to-long' instruction.
1421          locations->SetInAt(0, Location::RequiresRegister());
1422          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1423          break;
1424
1425        case Primitive::kPrimFloat: {
1426          // Processing a Dex `float-to-long' instruction.
1427          InvokeRuntimeCallingConvention calling_convention;
1428          locations->SetInAt(0, Location::FpuRegisterLocation(
1429              calling_convention.GetFpuRegisterAt(0)));
1430          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1431          break;
1432        }
1433
1434        case Primitive::kPrimDouble: {
1435          // Processing a Dex `double-to-long' instruction.
1436          InvokeRuntimeCallingConvention calling_convention;
1437          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1438              calling_convention.GetFpuRegisterAt(0),
1439              calling_convention.GetFpuRegisterAt(1)));
1440          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1441          break;
1442        }
1443
1444        default:
1445          LOG(FATAL) << "Unexpected type conversion from " << input_type
1446                     << " to " << result_type;
1447      }
1448      break;
1449
1450    case Primitive::kPrimChar:
1451      switch (input_type) {
1452        case Primitive::kPrimByte:
1453        case Primitive::kPrimShort:
1454        case Primitive::kPrimInt:
1455          // Processing a Dex `int-to-char' instruction.
1456          locations->SetInAt(0, Location::RequiresRegister());
1457          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1458          break;
1459
1460        default:
1461          LOG(FATAL) << "Unexpected type conversion from " << input_type
1462                     << " to " << result_type;
1463      }
1464      break;
1465
1466    case Primitive::kPrimFloat:
1467      switch (input_type) {
1468        case Primitive::kPrimByte:
1469        case Primitive::kPrimShort:
1470        case Primitive::kPrimInt:
1471        case Primitive::kPrimChar:
1472          // Processing a Dex `int-to-float' instruction.
1473          locations->SetInAt(0, Location::RequiresRegister());
1474          locations->SetOut(Location::RequiresFpuRegister());
1475          break;
1476
1477        case Primitive::kPrimLong:
1478          // Processing a Dex `long-to-float' instruction.
1479          locations->SetInAt(0, Location::RequiresRegister());
1480          locations->SetOut(Location::RequiresFpuRegister());
1481          locations->AddTemp(Location::RequiresRegister());
1482          locations->AddTemp(Location::RequiresRegister());
1483          locations->AddTemp(Location::RequiresFpuRegister());
1484          locations->AddTemp(Location::RequiresFpuRegister());
1485          break;
1486
1487        case Primitive::kPrimDouble:
1488          // Processing a Dex `double-to-float' instruction.
1489          locations->SetInAt(0, Location::RequiresFpuRegister());
1490          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1491          break;
1492
1493        default:
1494          LOG(FATAL) << "Unexpected type conversion from " << input_type
1495                     << " to " << result_type;
1496      }
1497      break;
1498
1499    case Primitive::kPrimDouble:
1500      switch (input_type) {
1501        case Primitive::kPrimByte:
1502        case Primitive::kPrimShort:
1503        case Primitive::kPrimInt:
1504        case Primitive::kPrimChar:
1505          // Processing a Dex `int-to-double' instruction.
1506          locations->SetInAt(0, Location::RequiresRegister());
1507          locations->SetOut(Location::RequiresFpuRegister());
1508          break;
1509
1510        case Primitive::kPrimLong:
1511          // Processing a Dex `long-to-double' instruction.
1512          locations->SetInAt(0, Location::RequiresRegister());
1513          locations->SetOut(Location::RequiresFpuRegister());
1514          locations->AddTemp(Location::RequiresRegister());
1515          locations->AddTemp(Location::RequiresRegister());
1516          locations->AddTemp(Location::RequiresFpuRegister());
1517          break;
1518
1519        case Primitive::kPrimFloat:
1520          // Processing a Dex `float-to-double' instruction.
1521          locations->SetInAt(0, Location::RequiresFpuRegister());
1522          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1523          break;
1524
1525        default:
1526          LOG(FATAL) << "Unexpected type conversion from " << input_type
1527                     << " to " << result_type;
1528      }
1529      break;
1530
1531    default:
1532      LOG(FATAL) << "Unexpected type conversion from " << input_type
1533                 << " to " << result_type;
1534  }
1535}
1536
1537void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1538  LocationSummary* locations = conversion->GetLocations();
1539  Location out = locations->Out();
1540  Location in = locations->InAt(0);
1541  Primitive::Type result_type = conversion->GetResultType();
1542  Primitive::Type input_type = conversion->GetInputType();
1543  DCHECK_NE(result_type, input_type);
1544  switch (result_type) {
1545    case Primitive::kPrimByte:
1546      switch (input_type) {
1547        case Primitive::kPrimShort:
1548        case Primitive::kPrimInt:
1549        case Primitive::kPrimChar:
1550          // Processing a Dex `int-to-byte' instruction.
1551          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1552          break;
1553
1554        default:
1555          LOG(FATAL) << "Unexpected type conversion from " << input_type
1556                     << " to " << result_type;
1557      }
1558      break;
1559
1560    case Primitive::kPrimShort:
1561      switch (input_type) {
1562        case Primitive::kPrimByte:
1563        case Primitive::kPrimInt:
1564        case Primitive::kPrimChar:
1565          // Processing a Dex `int-to-short' instruction.
1566          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1567          break;
1568
1569        default:
1570          LOG(FATAL) << "Unexpected type conversion from " << input_type
1571                     << " to " << result_type;
1572      }
1573      break;
1574
1575    case Primitive::kPrimInt:
1576      switch (input_type) {
1577        case Primitive::kPrimLong:
1578          // Processing a Dex `long-to-int' instruction.
1579          DCHECK(out.IsRegister());
1580          if (in.IsRegisterPair()) {
1581            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1582          } else if (in.IsDoubleStackSlot()) {
1583            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1584          } else {
1585            DCHECK(in.IsConstant());
1586            DCHECK(in.GetConstant()->IsLongConstant());
1587            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1588            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1589          }
1590          break;
1591
1592        case Primitive::kPrimFloat: {
1593          // Processing a Dex `float-to-int' instruction.
1594          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1595          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1596          __ vcvtis(temp, temp);
1597          __ vmovrs(out.AsRegister<Register>(), temp);
1598          break;
1599        }
1600
1601        case Primitive::kPrimDouble: {
1602          // Processing a Dex `double-to-int' instruction.
1603          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1604          DRegister temp_d = FromLowSToD(temp_s);
1605          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1606          __ vcvtid(temp_s, temp_d);
1607          __ vmovrs(out.AsRegister<Register>(), temp_s);
1608          break;
1609        }
1610
1611        default:
1612          LOG(FATAL) << "Unexpected type conversion from " << input_type
1613                     << " to " << result_type;
1614      }
1615      break;
1616
1617    case Primitive::kPrimLong:
1618      switch (input_type) {
1619        case Primitive::kPrimByte:
1620        case Primitive::kPrimShort:
1621        case Primitive::kPrimInt:
1622        case Primitive::kPrimChar:
1623          // Processing a Dex `int-to-long' instruction.
1624          DCHECK(out.IsRegisterPair());
1625          DCHECK(in.IsRegister());
1626          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1627          // Sign extension.
1628          __ Asr(out.AsRegisterPairHigh<Register>(),
1629                 out.AsRegisterPairLow<Register>(),
1630                 31);
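          // For example, the int 0x80000000 (Integer.MIN_VALUE) becomes the
          // long 0xFFFFFFFF80000000, while a non-negative int gets a zero
          // high word.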
1631          break;
1632
1633        case Primitive::kPrimFloat:
1634          // Processing a Dex `float-to-long' instruction.
1635          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1636                                  conversion,
1637                                  conversion->GetDexPc());
1638          break;
1639
1640        case Primitive::kPrimDouble:
1641          // Processing a Dex `double-to-long' instruction.
1642          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1643                                  conversion,
1644                                  conversion->GetDexPc());
1645          break;
1646
1647        default:
1648          LOG(FATAL) << "Unexpected type conversion from " << input_type
1649                     << " to " << result_type;
1650      }
1651      break;
1652
1653    case Primitive::kPrimChar:
1654      switch (input_type) {
1655        case Primitive::kPrimByte:
1656        case Primitive::kPrimShort:
1657        case Primitive::kPrimInt:
1658          // Processing a Dex `int-to-char' instruction.
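          // ubfx zero-extends bits [0, 16), matching Java's unsigned 16-bit
          // char semantics, whereas the int-to-short path above uses sbfx to
          // sign-extend.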
1659          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1660          break;
1661
1662        default:
1663          LOG(FATAL) << "Unexpected type conversion from " << input_type
1664                     << " to " << result_type;
1665      }
1666      break;
1667
1668    case Primitive::kPrimFloat:
1669      switch (input_type) {
1670        case Primitive::kPrimByte:
1671        case Primitive::kPrimShort:
1672        case Primitive::kPrimInt:
1673        case Primitive::kPrimChar: {
1674          // Processing a Dex `int-to-float' instruction.
1675          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1676          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1677          break;
1678        }
1679
1680        case Primitive::kPrimLong: {
1681          // Processing a Dex `long-to-float' instruction.
1682          Register low = in.AsRegisterPairLow<Register>();
1683          Register high = in.AsRegisterPairHigh<Register>();
1684          SRegister output = out.AsFpuRegister<SRegister>();
1685          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1686          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1687          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1688          DRegister temp1_d = FromLowSToD(temp1_s);
1689          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1690          DRegister temp2_d = FromLowSToD(temp2_s);
1691
1692          // Operations use doubles for precision reasons (each 32-bit
1693          // half of a long fits in the 53-bit mantissa of a double,
1694          // but not in the 24-bit mantissa of a float).  This is
1695          // especially important for the low bits.  The result is
1696          // eventually converted to float.
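          //
          // A rough C++ equivalent of the sequence below, as a sketch only
          // (the helper name is made up):
          //
          //   float LongToFloatViaDouble(int64_t value) {
          //     double high = static_cast<double>(static_cast<int32_t>(value >> 32));
          //     double low = static_cast<double>(static_cast<uint32_t>(value));
          //     return static_cast<float>(high * 4294967296.0 + low);  // 2^32
          //   }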
1697
1698          // temp1_d = int-to-double(high)
1699          __ vmovsr(temp1_s, high);
1700          __ vcvtdi(temp1_d, temp1_s);
1701          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1702          // as an immediate value into `temp2_d` does not work, as
1703          // this instruction only transfers 8 significant bits of its
1704          // immediate operand.  Instead, use two 32-bit core
1705          // registers to load `k2Pow32EncodingForDouble` into
1706          // `temp2_d`.
1707          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1708          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1709          __ vmovdrr(temp2_d, constant_low, constant_high);
1710          // temp1_d = temp1_d * 2^32
1711          __ vmuld(temp1_d, temp1_d, temp2_d);
1712          // temp2_d = unsigned-to-double(low)
1713          __ vmovsr(temp2_s, low);
1714          __ vcvtdu(temp2_d, temp2_s);
1715          // temp1_d = temp1_d + temp2_d
1716          __ vaddd(temp1_d, temp1_d, temp2_d);
1717          // output = double-to-float(temp1_d);
1718          __ vcvtsd(output, temp1_d);
1719          break;
1720        }
1721
1722        case Primitive::kPrimDouble:
1723          // Processing a Dex `double-to-float' instruction.
1724          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1725                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1726          break;
1727
1728        default:
1729          LOG(FATAL) << "Unexpected type conversion from " << input_type
1730                     << " to " << result_type;
1731      }
1732      break;
1733
1734    case Primitive::kPrimDouble:
1735      switch (input_type) {
1736        case Primitive::kPrimByte:
1737        case Primitive::kPrimShort:
1738        case Primitive::kPrimInt:
1739        case Primitive::kPrimChar: {
1740          // Processing a Dex `int-to-double' instruction.
1741          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1742          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1743                    out.AsFpuRegisterPairLow<SRegister>());
1744          break;
1745        }
1746
1747        case Primitive::kPrimLong: {
1748          // Processing a Dex `long-to-double' instruction.
1749          Register low = in.AsRegisterPairLow<Register>();
1750          Register high = in.AsRegisterPairHigh<Register>();
1751          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1752          DRegister out_d = FromLowSToD(out_s);
1753          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1754          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1755          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1756          DRegister temp_d = FromLowSToD(temp_s);
1757
1758          // out_d = int-to-double(high)
1759          __ vmovsr(out_s, high);
1760          __ vcvtdi(out_d, out_s);
1761          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1762          // as an immediate value into `temp_d` does not work, as
1763          // this instruction only transfers 8 significant bits of its
1764          // immediate operand.  Instead, use two 32-bit core
1765          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1766          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1767          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1768          __ vmovdrr(temp_d, constant_low, constant_high);
1769          // out_d = out_d * 2^32
1770          __ vmuld(out_d, out_d, temp_d);
1771          // temp_d = unsigned-to-double(low)
1772          __ vmovsr(temp_s, low);
1773          __ vcvtdu(temp_d, temp_s);
1774          // out_d = out_d + temp_d
1775          __ vaddd(out_d, out_d, temp_d);
1776          break;
1777        }
1778
1779        case Primitive::kPrimFloat:
1780          // Processing a Dex `float-to-double' instruction.
1781          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1782                    in.AsFpuRegister<SRegister>());
1783          break;
1784
1785        default:
1786          LOG(FATAL) << "Unexpected type conversion from " << input_type
1787                     << " to " << result_type;
1788      }
1789      break;
1790
1791    default:
1792      LOG(FATAL) << "Unexpected type conversion from " << input_type
1793                 << " to " << result_type;
1794  }
1795}
1796
1797void LocationsBuilderARM::VisitAdd(HAdd* add) {
1798  LocationSummary* locations =
1799      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1800  switch (add->GetResultType()) {
1801    case Primitive::kPrimInt:
1802    case Primitive::kPrimLong: {
1803      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1804      locations->SetInAt(0, Location::RequiresRegister());
1805      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1806      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1807      break;
1808    }
1809
1810    case Primitive::kPrimFloat:
1811    case Primitive::kPrimDouble: {
1812      locations->SetInAt(0, Location::RequiresFpuRegister());
1813      locations->SetInAt(1, Location::RequiresFpuRegister());
1814      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1815      break;
1816    }
1817
1818    default:
1819      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1820  }
1821}
1822
1823void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1824  LocationSummary* locations = add->GetLocations();
1825  Location out = locations->Out();
1826  Location first = locations->InAt(0);
1827  Location second = locations->InAt(1);
1828  switch (add->GetResultType()) {
1829    case Primitive::kPrimInt:
1830      if (second.IsRegister()) {
1831        __ add(out.AsRegister<Register>(),
1832               first.AsRegister<Register>(),
1833               ShifterOperand(second.AsRegister<Register>()));
1834      } else {
1835        __ AddConstant(out.AsRegister<Register>(),
1836                       first.AsRegister<Register>(),
1837                       second.GetConstant()->AsIntConstant()->GetValue());
1838      }
1839      break;
1840
1841    case Primitive::kPrimLong:
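      // 64-bit add: `adds` produces the low word and sets the carry flag,
      // and `adc` folds that carry into the high-word addition.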
1842      __ adds(out.AsRegisterPairLow<Register>(),
1843              first.AsRegisterPairLow<Register>(),
1844              ShifterOperand(second.AsRegisterPairLow<Register>()));
1845      __ adc(out.AsRegisterPairHigh<Register>(),
1846             first.AsRegisterPairHigh<Register>(),
1847             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1848      break;
1849
1850    case Primitive::kPrimFloat:
1851      __ vadds(out.AsFpuRegister<SRegister>(),
1852               first.AsFpuRegister<SRegister>(),
1853               second.AsFpuRegister<SRegister>());
1854      break;
1855
1856    case Primitive::kPrimDouble:
1857      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1858               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1859               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1860      break;
1861
1862    default:
1863      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1864  }
1865}
1866
1867void LocationsBuilderARM::VisitSub(HSub* sub) {
1868  LocationSummary* locations =
1869      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1870  switch (sub->GetResultType()) {
1871    case Primitive::kPrimInt:
1872    case Primitive::kPrimLong: {
1873      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1874      locations->SetInAt(0, Location::RequiresRegister());
1875      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1876      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1877      break;
1878    }
1879    case Primitive::kPrimFloat:
1880    case Primitive::kPrimDouble: {
1881      locations->SetInAt(0, Location::RequiresFpuRegister());
1882      locations->SetInAt(1, Location::RequiresFpuRegister());
1883      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1884      break;
1885    }
1886    default:
1887      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1888  }
1889}
1890
1891void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1892  LocationSummary* locations = sub->GetLocations();
1893  Location out = locations->Out();
1894  Location first = locations->InAt(0);
1895  Location second = locations->InAt(1);
1896  switch (sub->GetResultType()) {
1897    case Primitive::kPrimInt: {
1898      if (second.IsRegister()) {
1899        __ sub(out.AsRegister<Register>(),
1900               first.AsRegister<Register>(),
1901               ShifterOperand(second.AsRegister<Register>()));
1902      } else {
1903        __ AddConstant(out.AsRegister<Register>(),
1904                       first.AsRegister<Register>(),
1905                       -second.GetConstant()->AsIntConstant()->GetValue());
1906      }
1907      break;
1908    }
1909
1910    case Primitive::kPrimLong: {
1911      __ subs(out.AsRegisterPairLow<Register>(),
1912              first.AsRegisterPairLow<Register>(),
1913              ShifterOperand(second.AsRegisterPairLow<Register>()));
1914      __ sbc(out.AsRegisterPairHigh<Register>(),
1915             first.AsRegisterPairHigh<Register>(),
1916             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1917      break;
1918    }
1919
1920    case Primitive::kPrimFloat: {
1921      __ vsubs(out.AsFpuRegister<SRegister>(),
1922               first.AsFpuRegister<SRegister>(),
1923               second.AsFpuRegister<SRegister>());
1924      break;
1925    }
1926
1927    case Primitive::kPrimDouble: {
1928      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1929               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1930               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1931      break;
1932    }
1933
1934
1935    default:
1936      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1937  }
1938}
1939
1940void LocationsBuilderARM::VisitMul(HMul* mul) {
1941  LocationSummary* locations =
1942      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1943  switch (mul->GetResultType()) {
1944    case Primitive::kPrimInt:
1945    case Primitive::kPrimLong:  {
1946      locations->SetInAt(0, Location::RequiresRegister());
1947      locations->SetInAt(1, Location::RequiresRegister());
1948      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1949      break;
1950    }
1951
1952    case Primitive::kPrimFloat:
1953    case Primitive::kPrimDouble: {
1954      locations->SetInAt(0, Location::RequiresFpuRegister());
1955      locations->SetInAt(1, Location::RequiresFpuRegister());
1956      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1957      break;
1958    }
1959
1960    default:
1961      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1962  }
1963}
1964
1965void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1966  LocationSummary* locations = mul->GetLocations();
1967  Location out = locations->Out();
1968  Location first = locations->InAt(0);
1969  Location second = locations->InAt(1);
1970  switch (mul->GetResultType()) {
1971    case Primitive::kPrimInt: {
1972      __ mul(out.AsRegister<Register>(),
1973             first.AsRegister<Register>(),
1974             second.AsRegister<Register>());
1975      break;
1976    }
1977    case Primitive::kPrimLong: {
1978      Register out_hi = out.AsRegisterPairHigh<Register>();
1979      Register out_lo = out.AsRegisterPairLow<Register>();
1980      Register in1_hi = first.AsRegisterPairHigh<Register>();
1981      Register in1_lo = first.AsRegisterPairLow<Register>();
1982      Register in2_hi = second.AsRegisterPairHigh<Register>();
1983      Register in2_lo = second.AsRegisterPairLow<Register>();
1984
1985      // Extra checks to protect against cases caused by the existence of the R1_R2 pair.
1986      // The algorithm is wrong if out.hi is either in1.lo or in2.lo
1987      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
1988      DCHECK_NE(out_hi, in1_lo);
1989      DCHECK_NE(out_hi, in2_lo);
1990
1991      // input: in1 - 64 bits, in2 - 64 bits
1992      // output: out
1993      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
1994      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1995      // parts: out.lo = (in1.lo * in2.lo)[31:0]
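      //
      // The same decomposition as a plain C++ sketch (names are made up):
      //
      //   uint64_t Mul64(uint64_t in1, uint64_t in2) {
      //     uint32_t in1_lo = static_cast<uint32_t>(in1);
      //     uint32_t in1_hi = static_cast<uint32_t>(in1 >> 32);
      //     uint32_t in2_lo = static_cast<uint32_t>(in2);
      //     uint32_t in2_hi = static_cast<uint32_t>(in2 >> 32);
      //     uint64_t lo_product = static_cast<uint64_t>(in1_lo) * in2_lo;    // umull
      //     uint32_t hi = in1_lo * in2_hi + in1_hi * in2_lo                  // mul + mla
      //                   + static_cast<uint32_t>(lo_product >> 32);         // add
      //     return (static_cast<uint64_t>(hi) << 32) | static_cast<uint32_t>(lo_product);
      //   }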
1996
1997      // IP <- in1.lo * in2.hi
1998      __ mul(IP, in1_lo, in2_hi);
1999      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2000      __ mla(out_hi, in1_hi, in2_lo, IP);
2001      // out.lo <- (in1.lo * in2.lo)[31:0];
2002      __ umull(out_lo, IP, in1_lo, in2_lo);
2003      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2004      __ add(out_hi, out_hi, ShifterOperand(IP));
2005      break;
2006    }
2007
2008    case Primitive::kPrimFloat: {
2009      __ vmuls(out.AsFpuRegister<SRegister>(),
2010               first.AsFpuRegister<SRegister>(),
2011               second.AsFpuRegister<SRegister>());
2012      break;
2013    }
2014
2015    case Primitive::kPrimDouble: {
2016      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2017               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2018               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2019      break;
2020    }
2021
2022    default:
2023      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2024  }
2025}
2026
2027void LocationsBuilderARM::VisitDiv(HDiv* div) {
2028  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2029      ? LocationSummary::kCall
2030      : LocationSummary::kNoCall;
2031  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2032
2033  switch (div->GetResultType()) {
2034    case Primitive::kPrimInt: {
2035      locations->SetInAt(0, Location::RequiresRegister());
2036      locations->SetInAt(1, Location::RequiresRegister());
2037      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2038      break;
2039    }
2040    case Primitive::kPrimLong: {
2041      InvokeRuntimeCallingConvention calling_convention;
2042      locations->SetInAt(0, Location::RegisterPairLocation(
2043          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2044      locations->SetInAt(1, Location::RegisterPairLocation(
2045          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2046      // The runtime helper puts the output in R0,R2.
2047      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2048      break;
2049    }
2050    case Primitive::kPrimFloat:
2051    case Primitive::kPrimDouble: {
2052      locations->SetInAt(0, Location::RequiresFpuRegister());
2053      locations->SetInAt(1, Location::RequiresFpuRegister());
2054      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2055      break;
2056    }
2057
2058    default:
2059      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2060  }
2061}
2062
2063void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2064  LocationSummary* locations = div->GetLocations();
2065  Location out = locations->Out();
2066  Location first = locations->InAt(0);
2067  Location second = locations->InAt(1);
2068
2069  switch (div->GetResultType()) {
2070    case Primitive::kPrimInt: {
2071      __ sdiv(out.AsRegister<Register>(),
2072              first.AsRegister<Register>(),
2073              second.AsRegister<Register>());
2074      break;
2075    }
2076
2077    case Primitive::kPrimLong: {
2078      InvokeRuntimeCallingConvention calling_convention;
2079      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2080      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2081      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2082      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2083      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2084      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2085
2086      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2087      break;
2088    }
2089
2090    case Primitive::kPrimFloat: {
2091      __ vdivs(out.AsFpuRegister<SRegister>(),
2092               first.AsFpuRegister<SRegister>(),
2093               second.AsFpuRegister<SRegister>());
2094      break;
2095    }
2096
2097    case Primitive::kPrimDouble: {
2098      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2099               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2100               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2101      break;
2102    }
2103
2104    default:
2105      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2106  }
2107}
2108
2109void LocationsBuilderARM::VisitRem(HRem* rem) {
2110  Primitive::Type type = rem->GetResultType();
2111  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2112      ? LocationSummary::kNoCall
2113      : LocationSummary::kCall;
2114  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2115
2116  switch (type) {
2117    case Primitive::kPrimInt: {
2118      locations->SetInAt(0, Location::RequiresRegister());
2119      locations->SetInAt(1, Location::RequiresRegister());
2120      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2121      locations->AddTemp(Location::RequiresRegister());
2122      break;
2123    }
2124    case Primitive::kPrimLong: {
2125      InvokeRuntimeCallingConvention calling_convention;
2126      locations->SetInAt(0, Location::RegisterPairLocation(
2127          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2128      locations->SetInAt(1, Location::RegisterPairLocation(
2129          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2130      // The runtime helper puts the output in R2,R3.
2131      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2132      break;
2133    }
2134    case Primitive::kPrimFloat: {
2135      InvokeRuntimeCallingConvention calling_convention;
2136      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2137      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2138      locations->SetOut(Location::FpuRegisterLocation(S0));
2139      break;
2140    }
2141
2142    case Primitive::kPrimDouble: {
2143      InvokeRuntimeCallingConvention calling_convention;
2144      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2145          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2146      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2147          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2148      locations->SetOut(Location::FpuRegisterPairLocation(S0, S1));
2149      break;
2150    }
2151
2152    default:
2153      LOG(FATAL) << "Unexpected rem type " << type;
2154  }
2155}
2156
2157void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2158  LocationSummary* locations = rem->GetLocations();
2159  Location out = locations->Out();
2160  Location first = locations->InAt(0);
2161  Location second = locations->InAt(1);
2162
2163  Primitive::Type type = rem->GetResultType();
2164  switch (type) {
2165    case Primitive::kPrimInt: {
2166      Register reg1 = first.AsRegister<Register>();
2167      Register reg2 = second.AsRegister<Register>();
2168      Register temp = locations->GetTemp(0).AsRegister<Register>();
2169
2170      // temp = reg1 / reg2  (integer division)
2171      // temp = temp * reg2
2172      // dest = reg1 - temp
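      //
      // For example, 7 % 3: temp = 7 / 3 = 2, temp = 2 * 3 = 6, dest = 7 - 6 = 1.
      // The same identity holds for negative dividends because sdiv truncates
      // toward zero: -7 % 3 gives temp = -2, temp = -6, dest = -1.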
2173      __ sdiv(temp, reg1, reg2);
2174      __ mul(temp, temp, reg2);
2175      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2176      break;
2177    }
2178
2179    case Primitive::kPrimLong: {
2180      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2181      break;
2182    }
2183
2184    case Primitive::kPrimFloat: {
2185      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2186      break;
2187    }
2188
2189    case Primitive::kPrimDouble: {
2190      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2191      break;
2192    }
2193
2194    default:
2195      LOG(FATAL) << "Unexpected rem type " << type;
2196  }
2197}
2198
2199void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2200  LocationSummary* locations =
2201      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2202  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2203  if (instruction->HasUses()) {
2204    locations->SetOut(Location::SameAsFirstInput());
2205  }
2206}
2207
2208void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2209  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2210  codegen_->AddSlowPath(slow_path);
2211
2212  LocationSummary* locations = instruction->GetLocations();
2213  Location value = locations->InAt(0);
2214
2215  switch (instruction->GetType()) {
2216    case Primitive::kPrimInt: {
2217      if (value.IsRegister()) {
2218        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2219        __ b(slow_path->GetEntryLabel(), EQ);
2220      } else {
2221        DCHECK(value.IsConstant()) << value;
2222        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2223          __ b(slow_path->GetEntryLabel());
2224        }
2225      }
2226      break;
2227    }
2228    case Primitive::kPrimLong: {
2229      if (value.IsRegisterPair()) {
2230        __ orrs(IP,
2231                value.AsRegisterPairLow<Register>(),
2232                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2233        __ b(slow_path->GetEntryLabel(), EQ);
2234      } else {
2235        DCHECK(value.IsConstant()) << value;
2236        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2237          __ b(slow_path->GetEntryLabel());
2238        }
2239      }
2240      break;
2241    }
2242    default:
2243      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2244  }
2245}
2246
2247void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2248  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2249
2250  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2251      ? LocationSummary::kCall
2252      : LocationSummary::kNoCall;
2253  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2254
2255  switch (op->GetResultType()) {
2256    case Primitive::kPrimInt: {
2257      locations->SetInAt(0, Location::RequiresRegister());
2258      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2259      locations->SetOut(Location::RequiresRegister());
2260      break;
2261    }
2262    case Primitive::kPrimLong: {
2263      InvokeRuntimeCallingConvention calling_convention;
2264      locations->SetInAt(0, Location::RegisterPairLocation(
2265          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2266      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2267      // The runtime helper puts the output in R0,R2.
2268      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2269      break;
2270    }
2271    default:
2272      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2273  }
2274}
2275
2276void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2277  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2278
2279  LocationSummary* locations = op->GetLocations();
2280  Location out = locations->Out();
2281  Location first = locations->InAt(0);
2282  Location second = locations->InAt(1);
2283
2284  Primitive::Type type = op->GetResultType();
2285  switch (type) {
2286    case Primitive::kPrimInt: {
2287      Register out_reg = out.AsRegister<Register>();
2288      Register first_reg = first.AsRegister<Register>();
2289      // Arm doesn't mask the shift count, so we need to do it ourselves.
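      // (Java only uses the low five bits of an int shift count, e.g.
      // `x << 33` must behave like `x << 1`, hence the masking below.)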
2290      if (second.IsRegister()) {
2291        Register second_reg = second.AsRegister<Register>();
2292        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2293        if (op->IsShl()) {
2294          __ Lsl(out_reg, first_reg, second_reg);
2295        } else if (op->IsShr()) {
2296          __ Asr(out_reg, first_reg, second_reg);
2297        } else {
2298          __ Lsr(out_reg, first_reg, second_reg);
2299        }
2300      } else {
2301        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2302        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2303        if (shift_value == 0) {  // Arm does not support shifting with a 0 immediate.
2304          __ Mov(out_reg, first_reg);
2305        } else if (op->IsShl()) {
2306          __ Lsl(out_reg, first_reg, shift_value);
2307        } else if (op->IsShr()) {
2308          __ Asr(out_reg, first_reg, shift_value);
2309        } else {
2310          __ Lsr(out_reg, first_reg, shift_value);
2311        }
2312      }
2313      break;
2314    }
2315    case Primitive::kPrimLong: {
2316      // TODO: Inline the assembly instead of calling the runtime.
2317      InvokeRuntimeCallingConvention calling_convention;
2318      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2319      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2320      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
2321      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2322      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2323
2324      int32_t entry_point_offset;
2325      if (op->IsShl()) {
2326        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
2327      } else if (op->IsShr()) {
2328        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
2329      } else {
2330        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
2331      }
2332      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2333      __ blx(LR);
2334      break;
2335    }
2336    default:
2337      LOG(FATAL) << "Unexpected operation type " << type;
2338  }
2339}
2340
2341void LocationsBuilderARM::VisitShl(HShl* shl) {
2342  HandleShift(shl);
2343}
2344
2345void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
2346  HandleShift(shl);
2347}
2348
2349void LocationsBuilderARM::VisitShr(HShr* shr) {
2350  HandleShift(shr);
2351}
2352
2353void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
2354  HandleShift(shr);
2355}
2356
2357void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
2358  HandleShift(ushr);
2359}
2360
2361void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
2362  HandleShift(ushr);
2363}
2364
2365void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2366  LocationSummary* locations =
2367      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2368  InvokeRuntimeCallingConvention calling_convention;
2369  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2370  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2371  locations->SetOut(Location::RegisterLocation(R0));
2372}
2373
2374void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
2375  InvokeRuntimeCallingConvention calling_convention;
2376  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2377  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2378  codegen_->InvokeRuntime(
2379      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
2380}
2381
2382void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2383  LocationSummary* locations =
2384      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2385  InvokeRuntimeCallingConvention calling_convention;
2386  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2387  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2388  locations->SetOut(Location::RegisterLocation(R0));
2389  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2390}
2391
2392void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
2393  InvokeRuntimeCallingConvention calling_convention;
2394  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
2395  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2396  codegen_->InvokeRuntime(
2397      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
2398}
2399
2400void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2401  LocationSummary* locations =
2402      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2403  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2404  if (location.IsStackSlot()) {
2405    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2406  } else if (location.IsDoubleStackSlot()) {
2407    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2408  }
2409  locations->SetOut(location);
2410}
2411
2412void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
2413  // Nothing to do, the parameter is already at its location.
2414  UNUSED(instruction);
2415}
2416
2417void LocationsBuilderARM::VisitNot(HNot* not_) {
2418  LocationSummary* locations =
2419      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2420  locations->SetInAt(0, Location::RequiresRegister());
2421  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2422}
2423
2424void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2425  LocationSummary* locations = not_->GetLocations();
2426  Location out = locations->Out();
2427  Location in = locations->InAt(0);
2428  switch (not_->InputAt(0)->GetType()) {
2429    case Primitive::kPrimBoolean:
2430      __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
2431      break;
2432
2433    case Primitive::kPrimInt:
2434      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2435      break;
2436
2437    case Primitive::kPrimLong:
2438      __ mvn(out.AsRegisterPairLow<Register>(),
2439             ShifterOperand(in.AsRegisterPairLow<Register>()));
2440      __ mvn(out.AsRegisterPairHigh<Register>(),
2441             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2442      break;
2443
2444    default:
2445      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2446  }
2447}
2448
2449void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2450  LocationSummary* locations =
2451      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2452  switch (compare->InputAt(0)->GetType()) {
2453    case Primitive::kPrimLong: {
2454      locations->SetInAt(0, Location::RequiresRegister());
2455      locations->SetInAt(1, Location::RequiresRegister());
2456      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2457      break;
2458    }
2459    case Primitive::kPrimFloat:
2460    case Primitive::kPrimDouble: {
2461      locations->SetInAt(0, Location::RequiresFpuRegister());
2462      locations->SetInAt(1, Location::RequiresFpuRegister());
2463      locations->SetOut(Location::RequiresRegister());
2464      break;
2465    }
2466    default:
2467      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2468  }
2469}
2470
2471void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
2472  LocationSummary* locations = compare->GetLocations();
2473  Register out = locations->Out().AsRegister<Register>();
2474  Location left = locations->InAt(0);
2475  Location right = locations->InAt(1);
2476
2477  Label less, greater, done;
2478  Primitive::Type type = compare->InputAt(0)->GetType();
2479  switch (type) {
2480    case Primitive::kPrimLong: {
2481      __ cmp(left.AsRegisterPairHigh<Register>(),
2482             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
2483      __ b(&less, LT);
2484      __ b(&greater, GT);
2485      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
2486      __ LoadImmediate(out, 0);
2487      __ cmp(left.AsRegisterPairLow<Register>(),
2488             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
2489      break;
2490    }
2491    case Primitive::kPrimFloat:
2492    case Primitive::kPrimDouble: {
2493      __ LoadImmediate(out, 0);
2494      if (type == Primitive::kPrimFloat) {
2495        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
2496      } else {
2497        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
2498                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
2499      }
2500      __ vmstat();  // transfer FP status register to ARM APSR.
2501      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
2502      break;
2503    }
2504    default:
2505      LOG(FATAL) << "Unexpected compare type " << type;
2506  }
2507  __ b(&done, EQ);
2508  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.
2509
2510  __ Bind(&greater);
2511  __ LoadImmediate(out, 1);
2512  __ b(&done);
2513
2514  __ Bind(&less);
2515  __ LoadImmediate(out, -1);
2516
2517  __ Bind(&done);
2518}
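
// The semantics implemented by VisitCompare above, expressed as a plain C++
// sketch for the floating-point case (illustrative only; `gt_bias` mirrors
// compare->IsGtBias()):
//
//   int Compare(double left, double right, bool gt_bias) {
//     if (left < right) return -1;
//     if (left > right) return 1;
//     if (left == right) return 0;
//     return gt_bias ? 1 : -1;  // Unordered: at least one operand is NaN.
//   }
//
// For longs there is no unordered case and the result is the usual signed
// -1/0/1 three-way comparison.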
2519
2520void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2521  LocationSummary* locations =
2522      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2523  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2524    locations->SetInAt(i, Location::Any());
2525  }
2526  locations->SetOut(Location::Any());
2527}
2528
2529void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
2530  UNUSED(instruction);
2531  LOG(FATAL) << "Unreachable";
2532}
2533
2534void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2535  // TODO (ported from quick): revisit Arm barrier kinds
2536  DmbOptions flavour = DmbOptions::ISH;  // Initialized only to quiet C++ warnings.
2537  switch (kind) {
2538    case MemBarrierKind::kAnyStore:
2539    case MemBarrierKind::kLoadAny:
2540    case MemBarrierKind::kAnyAny: {
2541      flavour = DmbOptions::ISH;
2542      break;
2543    }
2544    case MemBarrierKind::kStoreStore: {
2545      flavour = DmbOptions::ISHST;
2546      break;
2547    }
2548    default:
2549      LOG(FATAL) << "Unexpected memory barrier " << kind;
2550  }
2551  __ dmb(flavour);
2552}
2553
2554void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
2555                                                         uint32_t offset,
2556                                                         Register out_lo,
2557                                                         Register out_hi) {
2558  if (offset != 0) {
2559    __ LoadImmediate(out_lo, offset);
2560    __ add(IP, addr, ShifterOperand(out_lo));
2561    addr = IP;
2562  }
2563  __ ldrexd(out_lo, out_hi, addr);
2564}
2565
2566void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
2567                                                          uint32_t offset,
2568                                                          Register value_lo,
2569                                                          Register value_hi,
2570                                                          Register temp1,
2571                                                          Register temp2) {
2572  Label fail;
2573  if (offset != 0) {
2574    __ LoadImmediate(temp1, offset);
2575    __ add(IP, addr, ShifterOperand(temp1));
2576    addr = IP;
2577  }
2578  __ Bind(&fail);
2579  // We need a load followed by a store. (The address used in a STREX instruction must
2580  // be the same as the address in the most recently executed LDREX instruction.)
2581  __ ldrexd(temp1, temp2, addr);
2582  __ strexd(temp1, value_lo, value_hi, addr);
2583  __ cmp(temp1, ShifterOperand(0));
2584  __ b(&fail, NE);
2585}
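
// GenerateWideAtomicStore above emits the classic load-exclusive/store-exclusive
// retry loop. As a pseudo-C++ sketch (the helpers are made-up stand-ins for the
// ldrexd/strexd instructions, not real functions):
//
//   do {
//     LoadExclusive64(addr);                    // ldrexd: claim the address.
//   } while (!StoreExclusive64(addr, value));   // strexd: fails if the claim was lost.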
2586
2587void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2588  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2589
2590  LocationSummary* locations =
2591      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2592  locations->SetInAt(0, Location::RequiresRegister());
2593  locations->SetInAt(1, Location::RequiresRegister());
2594
2595
2596  Primitive::Type field_type = field_info.GetFieldType();
2597  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2598  bool generate_volatile = field_info.IsVolatile()
2599      && is_wide
2600      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2601  // Temporary registers for the write barrier.
2602  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2603  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2604    locations->AddTemp(Location::RequiresRegister());
2605    locations->AddTemp(Location::RequiresRegister());
2606  } else if (generate_volatile) {
2607    // The Arm encoding has some additional constraints for ldrexd/strexd:
2608    // - registers need to be consecutive
2609    // - the first register should be even but not R14.
2610    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2611    // enable Arm encoding.
2612    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2613
2614    locations->AddTemp(Location::RequiresRegister());
2615    locations->AddTemp(Location::RequiresRegister());
2616    if (field_type == Primitive::kPrimDouble) {
2617      // For doubles we need two more registers to copy the value.
2618      locations->AddTemp(Location::RegisterLocation(R2));
2619      locations->AddTemp(Location::RegisterLocation(R3));
2620    }
2621  }
2622}
2623
2624void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
2625                                                 const FieldInfo& field_info) {
2626  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2627
2628  LocationSummary* locations = instruction->GetLocations();
2629  Register base = locations->InAt(0).AsRegister<Register>();
2630  Location value = locations->InAt(1);
2631
2632  bool is_volatile = field_info.IsVolatile();
2633  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2634  Primitive::Type field_type = field_info.GetFieldType();
2635  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2636
2637  if (is_volatile) {
2638    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
2639  }
2640
2641  switch (field_type) {
2642    case Primitive::kPrimBoolean:
2643    case Primitive::kPrimByte: {
2644      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
2645      break;
2646    }
2647
2648    case Primitive::kPrimShort:
2649    case Primitive::kPrimChar: {
2650      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
2651      break;
2652    }
2653
2654    case Primitive::kPrimInt:
2655    case Primitive::kPrimNot: {
2656      Register value_reg = value.AsRegister<Register>();
2657      __ StoreToOffset(kStoreWord, value_reg, base, offset);
2658      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2659        Register temp = locations->GetTemp(0).AsRegister<Register>();
2660        Register card = locations->GetTemp(1).AsRegister<Register>();
2661        codegen_->MarkGCCard(temp, card, base, value_reg);
2662      }
2663      break;
2664    }
2665
2666    case Primitive::kPrimLong: {
2667      if (is_volatile && !atomic_ldrd_strd) {
2668        GenerateWideAtomicStore(base, offset,
2669                                value.AsRegisterPairLow<Register>(),
2670                                value.AsRegisterPairHigh<Register>(),
2671                                locations->GetTemp(0).AsRegister<Register>(),
2672                                locations->GetTemp(1).AsRegister<Register>());
2673      } else {
2674        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
2675      }
2676      break;
2677    }
2678
2679    case Primitive::kPrimFloat: {
2680      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
2681      break;
2682    }
2683
2684    case Primitive::kPrimDouble: {
2685      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
2686      if (is_volatile && !atomic_ldrd_strd) {
2687        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
2688        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();
2689
2690        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);
2691
2692        GenerateWideAtomicStore(base, offset,
2693                                value_reg_lo,
2694                                value_reg_hi,
2695                                locations->GetTemp(2).AsRegister<Register>(),
2696                                locations->GetTemp(3).AsRegister<Register>());
2697      } else {
2698        __ StoreDToOffset(value_reg, base, offset);
2699      }
2700      break;
2701    }
2702
2703    case Primitive::kPrimVoid:
2704      LOG(FATAL) << "Unreachable type " << field_type;
2705      UNREACHABLE();
2706  }
2707
2708  if (is_volatile) {
2709    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
2710  }
2711}
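
// Barrier placement used by the volatile paths above and below, summarized:
//
//   volatile store:  dmb (kAnyStore); store; dmb (kAnyAny)
//   volatile load:   load; dmb (kLoadAny)
//
// (See HandleFieldSet above and HandleFieldGet below.)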
2712
2713void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2714  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2715  LocationSummary* locations =
2716      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2717  locations->SetInAt(0, Location::RequiresRegister());
2718  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2719
2720  bool generate_volatile = field_info.IsVolatile()
2721      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2722      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2723  if (generate_volatile) {
2724    // The Arm encoding has some additional constraints for ldrexd/strexd:
2725    // - registers need to be consecutive
2726    // - the first register should be even but not R14.
2727    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2728    // enable Arm encoding.
2729    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2730    locations->AddTemp(Location::RequiresRegister());
2731    locations->AddTemp(Location::RequiresRegister());
2732  }
2733}
2734
2735void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
2736                                                 const FieldInfo& field_info) {
2737  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2738
2739  LocationSummary* locations = instruction->GetLocations();
2740  Register base = locations->InAt(0).AsRegister<Register>();
2741  Location out = locations->Out();
2742  bool is_volatile = field_info.IsVolatile();
2743  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
2744  Primitive::Type field_type = field_info.GetFieldType();
2745  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2746
2747  switch (field_type) {
2748    case Primitive::kPrimBoolean: {
2749      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
2750      break;
2751    }
2752
2753    case Primitive::kPrimByte: {
2754      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
2755      break;
2756    }
2757
2758    case Primitive::kPrimShort: {
2759      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
2760      break;
2761    }
2762
2763    case Primitive::kPrimChar: {
2764      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
2765      break;
2766    }
2767
2768    case Primitive::kPrimInt:
2769    case Primitive::kPrimNot: {
2770      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
2771      break;
2772    }
2773
2774    case Primitive::kPrimLong: {
2775      if (is_volatile && !atomic_ldrd_strd) {
2776        GenerateWideAtomicLoad(base, offset,
2777                               out.AsRegisterPairLow<Register>(),
2778                               out.AsRegisterPairHigh<Register>());
2779      } else {
2780        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
2781      }
2782      break;
2783    }
2784
2785    case Primitive::kPrimFloat: {
2786      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
2787      break;
2788    }
2789
2790    case Primitive::kPrimDouble: {
2791      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
2792      if (is_volatile && !atomic_ldrd_strd) {
2793        Register lo = locations->GetTemp(0).AsRegister<Register>();
2794        Register hi = locations->GetTemp(1).AsRegister<Register>();
2795        GenerateWideAtomicLoad(base, offset, lo, hi);
2796        __ vmovdrr(out_reg, lo, hi);
2797      } else {
2798        __ LoadDFromOffset(out_reg, base, offset);
2799      }
2800      break;
2801    }
2802
2803    case Primitive::kPrimVoid:
2804      LOG(FATAL) << "Unreachable type " << field_type;
2805      UNREACHABLE();
2806  }
2807
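      // A volatile read is followed by a LoadAny barrier so that it is ordered before any
      // subsequent memory access (acquire semantics).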
2808  if (is_volatile) {
2809    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
2810  }
2811}
2812
2813void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2814  HandleFieldSet(instruction, instruction->GetFieldInfo());
2815}
2816
2817void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2818  HandleFieldSet(instruction, instruction->GetFieldInfo());
2819}
2820
2821void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2822  HandleFieldGet(instruction, instruction->GetFieldInfo());
2823}
2824
2825void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2826  HandleFieldGet(instruction, instruction->GetFieldInfo());
2827}
2828
2829void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2830  HandleFieldGet(instruction, instruction->GetFieldInfo());
2831}
2832
2833void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2834  HandleFieldGet(instruction, instruction->GetFieldInfo());
2835}
2836
2837void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2838  HandleFieldSet(instruction, instruction->GetFieldInfo());
2839}
2840
2841void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2842  HandleFieldSet(instruction, instruction->GetFieldInfo());
2843}
2844
2845void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2846  LocationSummary* locations =
2847      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2848  locations->SetInAt(0, Location::RequiresRegister());
2849  if (instruction->HasUses()) {
2850    locations->SetOut(Location::SameAsFirstInput());
2851  }
2852}
2853
2854void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2855  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2856  codegen_->AddSlowPath(slow_path);
2857
2858  LocationSummary* locations = instruction->GetLocations();
2859  Location obj = locations->InAt(0);
2860
2861  if (obj.IsRegister()) {
2862    __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2863    __ b(slow_path->GetEntryLabel(), EQ);
2864  } else {
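        // A constant input can only be the null constant, so the check always fails and the
        // slow path is entered unconditionally.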
2865    DCHECK(obj.IsConstant()) << obj;
2866    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2867    __ b(slow_path->GetEntryLabel());
2868  }
2869}
2870
2871void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2872  LocationSummary* locations =
2873      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2874  locations->SetInAt(0, Location::RequiresRegister());
2875  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2876  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2877}
2878
2879void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2880  LocationSummary* locations = instruction->GetLocations();
2881  Register obj = locations->InAt(0).AsRegister<Register>();
2882  Location index = locations->InAt(1);
2883
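      // A constant index is folded into the load offset; a register index is scaled by the
      // element size and added to the array base in IP, and the load then uses the data offset.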
2884  switch (instruction->GetType()) {
2885    case Primitive::kPrimBoolean: {
2886      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2887      Register out = locations->Out().AsRegister<Register>();
2888      if (index.IsConstant()) {
2889        size_t offset =
2890            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2891        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2892      } else {
2893        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2894        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2895      }
2896      break;
2897    }
2898
2899    case Primitive::kPrimByte: {
2900      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2901      Register out = locations->Out().AsRegister<Register>();
2902      if (index.IsConstant()) {
2903        size_t offset =
2904            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2905        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2906      } else {
2907        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2908        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2909      }
2910      break;
2911    }
2912
2913    case Primitive::kPrimShort: {
2914      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2915      Register out = locations->Out().AsRegister<Register>();
2916      if (index.IsConstant()) {
2917        size_t offset =
2918            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2919        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2920      } else {
2921        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2922        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2923      }
2924      break;
2925    }
2926
2927    case Primitive::kPrimChar: {
2928      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2929      Register out = locations->Out().AsRegister<Register>();
2930      if (index.IsConstant()) {
2931        size_t offset =
2932            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2933        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2934      } else {
2935        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2936        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2937      }
2938      break;
2939    }
2940
2941    case Primitive::kPrimInt:
2942    case Primitive::kPrimNot: {
2943      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2944      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2945      Register out = locations->Out().AsRegister<Register>();
2946      if (index.IsConstant()) {
2947        size_t offset =
2948            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2949        __ LoadFromOffset(kLoadWord, out, obj, offset);
2950      } else {
2951        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
2952        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2953      }
2954      break;
2955    }
2956
2957    case Primitive::kPrimLong: {
2958      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2959      Location out = locations->Out();
2960      if (index.IsConstant()) {
2961        size_t offset =
2962            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2963        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2964      } else {
2965        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
2966        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2967      }
2968      break;
2969    }
2970
2971    case Primitive::kPrimFloat: {
2972      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
2973      Location out = locations->Out();
2974      DCHECK(out.IsFpuRegister());
2975      if (index.IsConstant()) {
2976        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2977        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
2978      } else {
2979        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
2980        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
2981      }
2982      break;
2983    }
2984
2985    case Primitive::kPrimDouble: {
2986      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
2987      Location out = locations->Out();
2988      DCHECK(out.IsFpuRegisterPair());
2989      if (index.IsConstant()) {
2990        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2991        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
2992      } else {
2993        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
2994        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
2995      }
2996      break;
2997    }
2998
2999    case Primitive::kPrimVoid:
3000      LOG(FATAL) << "Unreachable type " << instruction->GetType();
3001      UNREACHABLE();
3002  }
3003}
3004
3005void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3006  Primitive::Type value_type = instruction->GetComponentType();
3007
3008  bool needs_write_barrier =
3009      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3010  bool needs_runtime_call = instruction->NeedsTypeCheck();
3011
3012  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3013      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3014  if (needs_runtime_call) {
3015    InvokeRuntimeCallingConvention calling_convention;
3016    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3017    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3018    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3019  } else {
3020    locations->SetInAt(0, Location::RequiresRegister());
3021    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3022    locations->SetInAt(2, Location::RequiresRegister());
3023
3024    if (needs_write_barrier) {
3025      // Temporary registers for the write barrier.
3026      locations->AddTemp(Location::RequiresRegister());
3027      locations->AddTemp(Location::RequiresRegister());
3028    }
3029  }
3030}
3031
3032void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
3033  LocationSummary* locations = instruction->GetLocations();
3034  Register obj = locations->InAt(0).AsRegister<Register>();
3035  Location index = locations->InAt(1);
3036  Primitive::Type value_type = instruction->GetComponentType();
3037  bool needs_runtime_call = locations->WillCall();
3038  bool needs_write_barrier =
3039      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3040
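      // Reference stores that need a type check are routed to the pAputObject runtime entry
      // point; all other stores are emitted inline, with a card-table write barrier when a
      // reference is stored.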
3041  switch (value_type) {
3042    case Primitive::kPrimBoolean:
3043    case Primitive::kPrimByte: {
3044      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
3045      Register value = locations->InAt(2).AsRegister<Register>();
3046      if (index.IsConstant()) {
3047        size_t offset =
3048            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
3049        __ StoreToOffset(kStoreByte, value, obj, offset);
3050      } else {
3051        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
3052        __ StoreToOffset(kStoreByte, value, IP, data_offset);
3053      }
3054      break;
3055    }
3056
3057    case Primitive::kPrimShort:
3058    case Primitive::kPrimChar: {
3059      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
3060      Register value = locations->InAt(2).AsRegister<Register>();
3061      if (index.IsConstant()) {
3062        size_t offset =
3063            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
3064        __ StoreToOffset(kStoreHalfword, value, obj, offset);
3065      } else {
3066        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
3067        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
3068      }
3069      break;
3070    }
3071
3072    case Primitive::kPrimInt:
3073    case Primitive::kPrimNot: {
3074      if (!needs_runtime_call) {
3075        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
3076        Register value = locations->InAt(2).AsRegister<Register>();
3077        if (index.IsConstant()) {
3078          size_t offset =
3079              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3080          __ StoreToOffset(kStoreWord, value, obj, offset);
3081        } else {
3082          DCHECK(index.IsRegister()) << index;
3083          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3084          __ StoreToOffset(kStoreWord, value, IP, data_offset);
3085        }
3086        if (needs_write_barrier) {
3087          DCHECK_EQ(value_type, Primitive::kPrimNot);
3088          Register temp = locations->GetTemp(0).AsRegister<Register>();
3089          Register card = locations->GetTemp(1).AsRegister<Register>();
3090          codegen_->MarkGCCard(temp, card, obj, value);
3091        }
3092      } else {
3093        DCHECK_EQ(value_type, Primitive::kPrimNot);
3094        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
3095                                instruction,
3096                                instruction->GetDexPc());
3097      }
3098      break;
3099    }
3100
3101    case Primitive::kPrimLong: {
3102      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3103      Location value = locations->InAt(2);
3104      if (index.IsConstant()) {
3105        size_t offset =
3106            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3107        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
3108      } else {
3109        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3110        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
3111      }
3112      break;
3113    }
3114
3115    case Primitive::kPrimFloat: {
3116      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3117      Location value = locations->InAt(2);
3118      DCHECK(value.IsFpuRegister());
3119      if (index.IsConstant()) {
3120        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3121        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
3122      } else {
3123        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3124        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
3125      }
3126      break;
3127    }
3128
3129    case Primitive::kPrimDouble: {
3130      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3131      Location value = locations->InAt(2);
3132      DCHECK(value.IsFpuRegisterPair());
3133      if (index.IsConstant()) {
3134        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3135        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3136      } else {
3137        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3138        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3139      }
3140      break;
3141    }
3142
3143    case Primitive::kPrimVoid:
3144      LOG(FATAL) << "Unreachable type " << value_type;
3145      UNREACHABLE();
3146  }
3147}
3148
3149void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3150  LocationSummary* locations =
3151      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3152  locations->SetInAt(0, Location::RequiresRegister());
3153  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3154}
3155
3156void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3157  LocationSummary* locations = instruction->GetLocations();
3158  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3159  Register obj = locations->InAt(0).AsRegister<Register>();
3160  Register out = locations->Out().AsRegister<Register>();
3161  __ LoadFromOffset(kLoadWord, out, obj, offset);
3162}
3163
3164void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3165  LocationSummary* locations =
3166      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3167  locations->SetInAt(0, Location::RequiresRegister());
3168  locations->SetInAt(1, Location::RequiresRegister());
3169  if (instruction->HasUses()) {
3170    locations->SetOut(Location::SameAsFirstInput());
3171  }
3172}
3173
3174void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3175  LocationSummary* locations = instruction->GetLocations();
3176  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3177      instruction, locations->InAt(0), locations->InAt(1));
3178  codegen_->AddSlowPath(slow_path);
3179
3180  Register index = locations->InAt(0).AsRegister<Register>();
3181  Register length = locations->InAt(1).AsRegister<Register>();
3182
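      // A single unsigned comparison covers both failure cases: a negative index (seen as a large
      // unsigned value) and an index >= length both take the CS (unsigned higher-or-same) branch.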
3183  __ cmp(index, ShifterOperand(length));
3184  __ b(slow_path->GetEntryLabel(), CS);
3185}
3186
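    // Write barrier: if `value` is null the card mark is skipped. Otherwise the card table base is
    // loaded from the current thread, the card index is computed as object >> kCardShift, and the
    // card is dirtied by storing the low byte of the table base into it, roughly:
    //   ldr card, [tr, #card_table_offset]; lsr temp, object, #card_shift; strb card, [card, temp]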
3187void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
3188  Label is_null;
3189  __ CompareAndBranchIfZero(value, &is_null);
3190  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
3191  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
3192  __ strb(card, Address(card, temp));
3193  __ Bind(&is_null);
3194}
3195
3196void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3197  temp->SetLocations(nullptr);
3198}
3199
3200void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3201  // Nothing to do, this is driven by the code generator.
3202  UNUSED(temp);
3203}
3204
3205void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3206  UNUSED(instruction);
3207  LOG(FATAL) << "Unreachable";
3208}
3209
3210void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
3211  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3212}
3213
3214void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3215  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3216}
3217
3218void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3219  HBasicBlock* block = instruction->GetBlock();
3220  if (block->GetLoopInformation() != nullptr) {
3221    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3222    // The back edge will generate the suspend check.
3223    return;
3224  }
3225  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3226    // The goto will generate the suspend check.
3227    return;
3228  }
3229  GenerateSuspendCheck(instruction, nullptr);
3230}
3231
3232void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
3233                                                       HBasicBlock* successor) {
3234  SuspendCheckSlowPathARM* slow_path =
3235      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
3236  codegen_->AddSlowPath(slow_path);
3237
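      // Test the thread's 16-bit flags field: a non-zero value means a suspend or checkpoint
      // request is pending. Without a successor we branch to the slow path on NE and resume right
      // after it; with a successor we jump to it when no request is pending and otherwise branch
      // to the slow path.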
3238  __ LoadFromOffset(
3239      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
3240  __ cmp(IP, ShifterOperand(0));
3241  // TODO: Figure out the branch offsets and use cbz/cbnz.
3242  if (successor == nullptr) {
3243    __ b(slow_path->GetEntryLabel(), NE);
3244    __ Bind(slow_path->GetReturnLabel());
3245  } else {
3246    __ b(codegen_->GetLabelOf(successor), EQ);
3247    __ b(slow_path->GetEntryLabel());
3248  }
3249}
3250
3251ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3252  return codegen_->GetAssembler();
3253}
3254
3255void ParallelMoveResolverARM::EmitMove(size_t index) {
3256  MoveOperands* move = moves_.Get(index);
3257  Location source = move->GetSource();
3258  Location destination = move->GetDestination();
3259
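      // Moves are dispatched on the source kind: core register, stack slot, S register, S register
      // pair, double stack slot or constant. Memory-to-memory moves go through IP one word at a
      // time, and only int and float constants are expected here.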
3260  if (source.IsRegister()) {
3261    if (destination.IsRegister()) {
3262      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
3263    } else {
3264      DCHECK(destination.IsStackSlot());
3265      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
3266                       SP, destination.GetStackIndex());
3267    }
3268  } else if (source.IsStackSlot()) {
3269    if (destination.IsRegister()) {
3270      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
3271                        SP, source.GetStackIndex());
3272    } else if (destination.IsFpuRegister()) {
3273      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
3274    } else {
3275      DCHECK(destination.IsStackSlot());
3276      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3277      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3278    }
3279  } else if (source.IsFpuRegister()) {
3280    if (destination.IsFpuRegister()) {
3281      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
3282    } else {
3283      DCHECK(destination.IsStackSlot());
3284      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
3285    }
3286  } else if (source.IsFpuRegisterPair()) {
3287    if (destination.IsFpuRegisterPair()) {
3288      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
3289               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
3290    } else {
3291      DCHECK(destination.IsDoubleStackSlot()) << destination;
3292      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
3293                        SP, destination.GetStackIndex());
3294    }
3295  } else if (source.IsDoubleStackSlot()) {
3296    if (destination.IsFpuRegisterPair()) {
3297      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
3298                         SP, source.GetStackIndex());
3299    } else {
3300      DCHECK(destination.IsDoubleStackSlot()) << destination;
3301      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3302      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3303      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
3304      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3305    }
3306  } else {
3307    DCHECK(source.IsConstant()) << source;
3308    HInstruction* constant = source.GetConstant();
3309    if (constant->IsIntConstant()) {
3310      int32_t value = constant->AsIntConstant()->GetValue();
3311      if (destination.IsRegister()) {
3312        __ LoadImmediate(destination.AsRegister<Register>(), value);
3313      } else {
3314        DCHECK(destination.IsStackSlot());
3315        __ LoadImmediate(IP, value);
3316        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3317      }
3318    } else {
3319      DCHECK(constant->IsFloatConstant());
3320      float value = constant->AsFloatConstant()->GetValue();
3321      if (destination.IsFpuRegister()) {
3322        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
3323      } else {
3324        DCHECK(destination.IsStackSlot());
3325        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
3326        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3327      }
3328    }
3329  }
3330}
3331
3332void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
3333  __ Mov(IP, reg);
3334  __ LoadFromOffset(kLoadWord, reg, SP, mem);
3335  __ StoreToOffset(kStoreWord, IP, SP, mem);
3336}
3337
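    // Swaps two stack slots. IP is one scratch register; a second core scratch comes from
    // ScratchRegisterScope, and if that register had to be spilled the slot offsets are adjusted
    // by one word to account for the push.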
3338void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
3339  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
3340  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
3341  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
3342                    SP, mem1 + stack_offset);
3343  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
3344  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
3345                   SP, mem2 + stack_offset);
3346  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
3347}
3348
3349void ParallelMoveResolverARM::EmitSwap(size_t index) {
3350  MoveOperands* move = moves_.Get(index);
3351  Location source = move->GetSource();
3352  Location destination = move->GetDestination();
3353
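      // All swaps are routed through IP (with vmovrs/vmovsr for S registers), so IP must not be
      // one of the operands; register-pair and double swaps are not handled and fall through to
      // the fatal default.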
3354  if (source.IsRegister() && destination.IsRegister()) {
3355    DCHECK_NE(source.AsRegister<Register>(), IP);
3356    DCHECK_NE(destination.AsRegister<Register>(), IP);
3357    __ Mov(IP, source.AsRegister<Register>());
3358    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3359    __ Mov(destination.AsRegister<Register>(), IP);
3360  } else if (source.IsRegister() && destination.IsStackSlot()) {
3361    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3362  } else if (source.IsStackSlot() && destination.IsRegister()) {
3363    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3364  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3365    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3366  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3367    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3368    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3369    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3370  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3371    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3372                                           : destination.AsFpuRegister<SRegister>();
3373    int mem = source.IsFpuRegister()
3374        ? destination.GetStackIndex()
3375        : source.GetStackIndex();
3376
3377    __ vmovrs(IP, reg);
3378    __ LoadSFromOffset(reg, SP, mem);
3379    __ StoreToOffset(kStoreWord, IP, SP, mem);
3380  } else {
3381    LOG(FATAL) << "Unimplemented";
3382  }
3383}
3384
3385void ParallelMoveResolverARM::SpillScratch(int reg) {
3386  __ Push(static_cast<Register>(reg));
3387}
3388
3389void ParallelMoveResolverARM::RestoreScratch(int reg) {
3390  __ Pop(static_cast<Register>(reg));
3391}
3392
3393void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3394  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3395      ? LocationSummary::kCallOnSlowPath
3396      : LocationSummary::kNoCall;
3397  LocationSummary* locations =
3398      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3399  locations->SetOut(Location::RequiresRegister());
3400}
3401
3402void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
3403  Register out = cls->GetLocations()->Out().AsRegister<Register>();
3404  if (cls->IsReferrersClass()) {
3405    DCHECK(!cls->CanCallRuntime());
3406    DCHECK(!cls->MustGenerateClinitCheck());
3407    codegen_->LoadCurrentMethod(out);
3408    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3409  } else {
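        // Load the current method, then its dex cache of resolved types, then the entry at this
        // type index. A null entry means the class is not resolved yet, so the slow path calls
        // into the runtime to resolve it (and to run its class initializer when required).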
3410    DCHECK(cls->CanCallRuntime());
3411    codegen_->LoadCurrentMethod(out);
3412    __ LoadFromOffset(
3413        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
3414    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
3415
3416    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3417        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3418    codegen_->AddSlowPath(slow_path);
3419    __ cmp(out, ShifterOperand(0));
3420    __ b(slow_path->GetEntryLabel(), EQ);
3421    if (cls->MustGenerateClinitCheck()) {
3422      GenerateClassInitializationCheck(slow_path, out);
3423    } else {
3424      __ Bind(slow_path->GetExitLabel());
3425    }
3426  }
3427}
3428
3429void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3430  LocationSummary* locations =
3431      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3432  locations->SetInAt(0, Location::RequiresRegister());
3433  if (check->HasUses()) {
3434    locations->SetOut(Location::SameAsFirstInput());
3435  }
3436}
3437
3438void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3439  // We assume the class is not null.
3440  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3441      check->GetLoadClass(), check, check->GetDexPc(), true);
3442  codegen_->AddSlowPath(slow_path);
3443  GenerateClassInitializationCheck(slow_path,
3444                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3445}
3446
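    // Compares the class status against kStatusInitialized and branches to the slow path whenever
    // initialization has not completed yet; the slow path calls into the runtime to initialize
    // the class.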
3447void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
3448    SlowPathCodeARM* slow_path, Register class_reg) {
3449  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
3450  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
3451  __ b(slow_path->GetEntryLabel(), LT);
3452  // Even if the initialized flag is set, we may be in a situation where caches are not synced
3453  // properly. Therefore, we do a memory fence.
3454  __ dmb(ISH);
3455  __ Bind(slow_path->GetExitLabel());
3456}
3457
3458void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3459  LocationSummary* locations =
3460      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3461  locations->SetOut(Location::RequiresRegister());
3462}
3463
3464void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
3465  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
3466  codegen_->AddSlowPath(slow_path);
3467
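      // Resolve the string through the dex cache: current method -> declaring class -> dex cache
      // strings -> entry at the string index. A null entry takes the slow path to resolve it.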
3468  Register out = load->GetLocations()->Out().AsRegister<Register>();
3469  codegen_->LoadCurrentMethod(out);
3470  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3471  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
3472  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
3473  __ cmp(out, ShifterOperand(0));
3474  __ b(slow_path->GetEntryLabel(), EQ);
3475  __ Bind(slow_path->GetExitLabel());
3476}
3477
3478void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3479  LocationSummary* locations =
3480      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3481  locations->SetOut(Location::RequiresRegister());
3482}
3483
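    // Loads the pending exception from the current thread and clears the thread-local field by
    // storing zero back, so the exception is delivered exactly once.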
3484void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3485  Register out = load->GetLocations()->Out().AsRegister<Register>();
3486  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3487  __ LoadFromOffset(kLoadWord, out, TR, offset);
3488  __ LoadImmediate(IP, 0);
3489  __ StoreToOffset(kStoreWord, IP, TR, offset);
3490}
3491
3492void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3493  LocationSummary* locations =
3494      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3495  InvokeRuntimeCallingConvention calling_convention;
3496  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3497}
3498
3499void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3500  codegen_->InvokeRuntime(
3501      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3502}
3503
3504void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3505  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3506      ? LocationSummary::kNoCall
3507      : LocationSummary::kCallOnSlowPath;
3508  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3509  locations->SetInAt(0, Location::RequiresRegister());
3510  locations->SetInAt(1, Location::RequiresRegister());
3511  locations->SetOut(Location::RequiresRegister());
3512}
3513
3514void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
3515  LocationSummary* locations = instruction->GetLocations();
3516  Register obj = locations->InAt(0).AsRegister<Register>();
3517  Register cls = locations->InAt(1).AsRegister<Register>();
3518  Register out = locations->Out().AsRegister<Register>();
3519  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3520  Label done, zero;
3521  SlowPathCodeARM* slow_path = nullptr;
3522
3523  // Return 0 if `obj` is null.
3524  // TODO: avoid this check if we know obj is not null.
3525  __ cmp(obj, ShifterOperand(0));
3526  __ b(&zero, EQ);
3527  // Compare the class of `obj` with `cls`.
3528  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
3529  __ cmp(out, ShifterOperand(cls));
3530  if (instruction->IsClassFinal()) {
3531    // Classes must be equal for the instanceof to succeed.
3532    __ b(&zero, NE);
3533    __ LoadImmediate(out, 1);
3534    __ b(&done);
3535  } else {
3536    // If the classes are not equal, we go into a slow path.
3537    DCHECK(locations->OnlyCallsOnSlowPath());
3538    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3539        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
3540    codegen_->AddSlowPath(slow_path);
3541    __ b(slow_path->GetEntryLabel(), NE);
3542    __ LoadImmediate(out, 1);
3543    __ b(&done);
3544  }
3545  __ Bind(&zero);
3546  __ LoadImmediate(out, 0);
3547  if (slow_path != nullptr) {
3548    __ Bind(slow_path->GetExitLabel());
3549  }
3550  __ Bind(&done);
3551}
3552
3553void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3554  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3555      instruction, LocationSummary::kCallOnSlowPath);
3556  locations->SetInAt(0, Location::RequiresRegister());
3557  locations->SetInAt(1, Location::RequiresRegister());
3558  locations->AddTemp(Location::RequiresRegister());
3559}
3560
3561void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
3562  LocationSummary* locations = instruction->GetLocations();
3563  Register obj = locations->InAt(0).AsRegister<Register>();
3564  Register cls = locations->InAt(1).AsRegister<Register>();
3565  Register temp = locations->GetTemp(0).AsRegister<Register>();
3566  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3567
3568  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3569      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
3570  codegen_->AddSlowPath(slow_path);
3571
3572  // TODO: avoid this check if we know obj is not null.
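      // A null reference passes any checkcast, so branch directly to the slow path's exit label.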
3573  __ cmp(obj, ShifterOperand(0));
3574  __ b(slow_path->GetExitLabel(), EQ);
3575  // Compare the class of `obj` with `cls`.
3576  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
3577  __ cmp(temp, ShifterOperand(cls));
3578  __ b(slow_path->GetEntryLabel(), NE);
3579  __ Bind(slow_path->GetExitLabel());
3580}
3581
3582void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3583  LocationSummary* locations =
3584      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3585  InvokeRuntimeCallingConvention calling_convention;
3586  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3587}
3588
3589void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3590  codegen_->InvokeRuntime(instruction->IsEnter()
3591        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3592      instruction,
3593      instruction->GetDexPc());
3594}
3595
3596void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3597void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3598void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3599
3600void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3601  LocationSummary* locations =
3602      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3603  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3604         || instruction->GetResultType() == Primitive::kPrimLong);
3605  locations->SetInAt(0, Location::RequiresRegister());
3606  locations->SetInAt(1, Location::RequiresRegister());
3607  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
3608  locations->SetOut(Location::RequiresRegister(), output_overlaps);
3609}
3610
3611void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3612  HandleBitwiseOperation(instruction);
3613}
3614
3615void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3616  HandleBitwiseOperation(instruction);
3617}
3618
3619void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3620  HandleBitwiseOperation(instruction);
3621}
3622
3623void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3624  LocationSummary* locations = instruction->GetLocations();
3625
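      // 32-bit operations use single registers; 64-bit operations apply the same instruction to
      // the low and high halves of the register pairs independently (and/orr/eor involve no
      // carries between the halves).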
3626  if (instruction->GetResultType() == Primitive::kPrimInt) {
3627    Register first = locations->InAt(0).AsRegister<Register>();
3628    Register second = locations->InAt(1).AsRegister<Register>();
3629    Register out = locations->Out().AsRegister<Register>();
3630    if (instruction->IsAnd()) {
3631      __ and_(out, first, ShifterOperand(second));
3632    } else if (instruction->IsOr()) {
3633      __ orr(out, first, ShifterOperand(second));
3634    } else {
3635      DCHECK(instruction->IsXor());
3636      __ eor(out, first, ShifterOperand(second));
3637    }
3638  } else {
3639    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3640    Location first = locations->InAt(0);
3641    Location second = locations->InAt(1);
3642    Location out = locations->Out();
3643    if (instruction->IsAnd()) {
3644      __ and_(out.AsRegisterPairLow<Register>(),
3645              first.AsRegisterPairLow<Register>(),
3646              ShifterOperand(second.AsRegisterPairLow<Register>()));
3647      __ and_(out.AsRegisterPairHigh<Register>(),
3648              first.AsRegisterPairHigh<Register>(),
3649              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3650    } else if (instruction->IsOr()) {
3651      __ orr(out.AsRegisterPairLow<Register>(),
3652             first.AsRegisterPairLow<Register>(),
3653             ShifterOperand(second.AsRegisterPairLow<Register>()));
3654      __ orr(out.AsRegisterPairHigh<Register>(),
3655             first.AsRegisterPairHigh<Register>(),
3656             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3657    } else {
3658      DCHECK(instruction->IsXor());
3659      __ eor(out.AsRegisterPairLow<Register>(),
3660             first.AsRegisterPairLow<Register>(),
3661             ShifterOperand(second.AsRegisterPairLow<Register>()));
3662      __ eor(out.AsRegisterPairHigh<Register>(),
3663             first.AsRegisterPairHigh<Register>(),
3664             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3665    }
3666  }
3667}
3668
3669}  // namespace arm
3670}  // namespace art
3671