code_generator_arm.cc revision 77520bca97ec44e3758510cebd0f20e3bb4584ea
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
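// Returns the D register that aliases the given even-numbered S register:
// the VFP register file overlaps S2n/S2n+1 with Dn, so e.g. S4 maps to D2.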
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
41static constexpr int kCurrentMethodStackOffset = 0;
42
43static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
44static constexpr size_t kRuntimeParameterCoreRegistersLength =
45    arraysize(kRuntimeParameterCoreRegisters);
46static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
47static constexpr size_t kRuntimeParameterFpuRegistersLength =
48    arraysize(kRuntimeParameterFpuRegisters);
49
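// Calling convention used for calls into the runtime (quick entrypoints):
// core arguments in R0-R3, floating-point arguments in S0-S3.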
50class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
51 public:
52  InvokeRuntimeCallingConvention()
53      : CallingConvention(kRuntimeParameterCoreRegisters,
54                          kRuntimeParameterCoreRegistersLength,
55                          kRuntimeParameterFpuRegisters,
56                          kRuntimeParameterFpuRegistersLength) {}
57
58 private:
59  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
60};
61
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
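// Base class for ARM slow paths. The entry label is bound at the start of the
// out-of-line code; the exit label is the branch target back to the fast path.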
65class SlowPathCodeARM : public SlowPathCode {
66 public:
67  SlowPathCodeARM() : entry_label_(), exit_label_() {}
68
69  Label* GetEntryLabel() { return &entry_label_; }
70  Label* GetExitLabel() { return &exit_label_; }
71
72 private:
73  Label entry_label_;
74  Label exit_label_;
75
76  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
77};
78
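// Slow path for HNullCheck: calls the pThrowNullPointer entrypoint, which does not return.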
79class NullCheckSlowPathARM : public SlowPathCodeARM {
80 public:
81  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
82
83  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
84    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
85    __ Bind(GetEntryLabel());
86    arm_codegen->InvokeRuntime(
87        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
88  }
89
90 private:
91  HNullCheck* const instruction_;
92  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
93};
94
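// Slow path for HDivZeroCheck: calls the pThrowDivZero entrypoint, which does not return.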
95class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
96 public:
97  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
98
99  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
101    __ Bind(GetEntryLabel());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
104  }
105
106 private:
107  HDivZeroCheck* const instruction_;
108  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
109};
110
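// Slow path for the explicit stack overflow check: loads the pThrowStackOverflow
// entrypoint directly into PC, i.e. a tail call that does not return.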
111class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
112 public:
113  StackOverflowCheckSlowPathARM() {}
114
115  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
116    __ Bind(GetEntryLabel());
117    __ LoadFromOffset(kLoadWord, PC, TR,
118        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
119  }
120
121 private:
122  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
123};
124
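// Slow path for HSuspendCheck: saves live registers, calls pTestSuspend, restores
// them, then branches back to the fast path or on to the successor block.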
125class SuspendCheckSlowPathARM : public SlowPathCodeARM {
126 public:
127  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
128      : instruction_(instruction), successor_(successor) {}
129
130  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
131    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
132    __ Bind(GetEntryLabel());
133    codegen->SaveLiveRegisters(instruction_->GetLocations());
134    arm_codegen->InvokeRuntime(
135        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
136    codegen->RestoreLiveRegisters(instruction_->GetLocations());
137    if (successor_ == nullptr) {
138      __ b(GetReturnLabel());
139    } else {
140      __ b(arm_codegen->GetLabelOf(successor_));
141    }
142  }
143
144  Label* GetReturnLabel() {
145    DCHECK(successor_ == nullptr);
146    return &return_label_;
147  }
148
149 private:
150  HSuspendCheck* const instruction_;
151  // If not null, the block to branch to after the suspend check.
152  HBasicBlock* const successor_;
153
154  // If `successor_` is null, the label to branch to after the suspend check.
155  Label return_label_;
156
157  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
158};
159
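// Slow path for HBoundsCheck: moves the index and length into the runtime calling
// convention registers (via a parallel move, as the locations may overlap) and
// calls pThrowArrayBounds.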
160class BoundsCheckSlowPathARM : public SlowPathCodeARM {
161 public:
162  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
163                         Location index_location,
164                         Location length_location)
165      : instruction_(instruction),
166        index_location_(index_location),
167        length_location_(length_location) {}
168
169  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
170    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
171    __ Bind(GetEntryLabel());
172    // We're moving two locations to locations that could overlap, so we need a parallel
173    // move resolver.
174    InvokeRuntimeCallingConvention calling_convention;
175    codegen->EmitParallelMoves(
176        index_location_,
177        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
178        length_location_,
179        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
180    arm_codegen->InvokeRuntime(
181        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
182  }
183
184 private:
185  HBoundsCheck* const instruction_;
186  const Location index_location_;
187  const Location length_location_;
188
189  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
190};
191
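// Slow path for HLoadClass and HClinitCheck: calls pInitializeType or
// pInitializeStaticStorage and moves the resulting class from R0 to the output.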
192class LoadClassSlowPathARM : public SlowPathCodeARM {
193 public:
194  LoadClassSlowPathARM(HLoadClass* cls,
195                       HInstruction* at,
196                       uint32_t dex_pc,
197                       bool do_clinit)
198      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
199    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
200  }
201
202  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
203    LocationSummary* locations = at_->GetLocations();
204
205    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
206    __ Bind(GetEntryLabel());
207    codegen->SaveLiveRegisters(locations);
208
209    InvokeRuntimeCallingConvention calling_convention;
210    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
211    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
212    int32_t entry_point_offset = do_clinit_
213        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
214        : QUICK_ENTRY_POINT(pInitializeType);
215    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
216
217    // Move the class to the desired location.
218    Location out = locations->Out();
219    if (out.IsValid()) {
220      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
221      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
222    }
223    codegen->RestoreLiveRegisters(locations);
224    __ b(GetExitLabel());
225  }
226
227 private:
228  // The class this slow path will load.
229  HLoadClass* const cls_;
230
231  // The instruction where this slow path is happening.
232  // (Might be the load class or an initialization check).
233  HInstruction* const at_;
234
235  // The dex PC of `at_`.
236  const uint32_t dex_pc_;
237
238  // Whether to initialize the class.
239  const bool do_clinit_;
240
241  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
242};
243
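// Slow path for HLoadString: calls pResolveString and moves the resolved string
// from R0 to the output location.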
244class LoadStringSlowPathARM : public SlowPathCodeARM {
245 public:
246  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
247
248  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
249    LocationSummary* locations = instruction_->GetLocations();
250    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
251
252    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
253    __ Bind(GetEntryLabel());
254    codegen->SaveLiveRegisters(locations);
255
256    InvokeRuntimeCallingConvention calling_convention;
257    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
258    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
259    arm_codegen->InvokeRuntime(
260        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
261    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
262
263    codegen->RestoreLiveRegisters(locations);
264    __ b(GetExitLabel());
265  }
266
267 private:
268  HLoadString* const instruction_;
269
270  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
271};
272
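// Slow path for HInstanceOf and HCheckCast: passes the class to check against and
// the object's class to pInstanceofNonTrivial or pCheckCast respectively.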
273class TypeCheckSlowPathARM : public SlowPathCodeARM {
274 public:
275  TypeCheckSlowPathARM(HInstruction* instruction,
276                       Location class_to_check,
277                       Location object_class,
278                       uint32_t dex_pc)
279      : instruction_(instruction),
280        class_to_check_(class_to_check),
281        object_class_(object_class),
282        dex_pc_(dex_pc) {}
283
284  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
285    LocationSummary* locations = instruction_->GetLocations();
286    DCHECK(instruction_->IsCheckCast()
287           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
288
289    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
290    __ Bind(GetEntryLabel());
291    codegen->SaveLiveRegisters(locations);
292
293    // We're moving two locations to locations that could overlap, so we need a parallel
294    // move resolver.
295    InvokeRuntimeCallingConvention calling_convention;
296    codegen->EmitParallelMoves(
297        class_to_check_,
298        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
299        object_class_,
300        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
301
302    if (instruction_->IsInstanceOf()) {
303      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
304      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
305    } else {
306      DCHECK(instruction_->IsCheckCast());
307      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
308    }
309
310    codegen->RestoreLiveRegisters(locations);
311    __ b(GetExitLabel());
312  }
313
314 private:
315  HInstruction* const instruction_;
316  const Location class_to_check_;
317  const Location object_class_;
318  uint32_t dex_pc_;
319
320  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
321};
322
325#undef __
326#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
327
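// Maps an HIR IfCondition to the (signed) ARM condition code used for branches.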
328inline Condition ARMCondition(IfCondition cond) {
329  switch (cond) {
330    case kCondEQ: return EQ;
331    case kCondNE: return NE;
332    case kCondLT: return LT;
333    case kCondLE: return LE;
334    case kCondGT: return GT;
335    case kCondGE: return GE;
336    default:
337      LOG(FATAL) << "Unknown if condition";
338  }
339  return EQ;        // Unreachable.
340}
341
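// Maps an HIR IfCondition to the ARM condition code of its negation.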
342inline Condition ARMOppositeCondition(IfCondition cond) {
343  switch (cond) {
344    case kCondEQ: return NE;
345    case kCondNE: return EQ;
346    case kCondLT: return GE;
347    case kCondLE: return GT;
348    case kCondGT: return LE;
349    case kCondGE: return LT;
350    default:
351      LOG(FATAL) << "Unknown if condition";
352  }
353  return EQ;        // Unreachable.
354}
355
356void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
357  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
358}
359
360void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
361  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
362}
363
364size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
365  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
366  return kArmWordSize;
367}
368
369size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
370  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
371  return kArmWordSize;
372}
373
374size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
375  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
376  return kArmWordSize;
377}
378
379size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
380  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
381  return kArmWordSize;
382}
383
384CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
385                                   const ArmInstructionSetFeatures& isa_features,
386                                   const CompilerOptions& compiler_options)
387    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters,
388                    kNumberOfRegisterPairs, compiler_options),
389      block_labels_(graph->GetArena(), 0),
390      location_builder_(graph, this),
391      instruction_visitor_(graph, this),
392      move_resolver_(graph->GetArena(), this),
393      assembler_(true),
394      isa_features_(isa_features) {}
395
396size_t CodeGeneratorARM::FrameEntrySpillSize() const {
397  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
398}
399
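// Picks a free register of the kind required by `type`: a core register pair for
// longs, a single core register for integral and reference types, a single S
// register for floats, and an even-aligned S register pair for doubles. The
// blocked-register bookkeeping is updated so overlapping registers and pairs
// cannot be handed out twice.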
400Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
401  switch (type) {
402    case Primitive::kPrimLong: {
403      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
404      ArmManagedRegister pair =
405          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
406      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
407      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
408
409      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
410      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
411      UpdateBlockedPairRegisters();
412      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
413    }
414
415    case Primitive::kPrimByte:
416    case Primitive::kPrimBoolean:
417    case Primitive::kPrimChar:
418    case Primitive::kPrimShort:
419    case Primitive::kPrimInt:
420    case Primitive::kPrimNot: {
421      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
422      // Block all register pairs that contain `reg`.
423      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
424        ArmManagedRegister current =
425            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
426        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
427          blocked_register_pairs_[i] = true;
428        }
429      }
430      return Location::RegisterLocation(reg);
431    }
432
433    case Primitive::kPrimFloat: {
434      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
435      return Location::FpuRegisterLocation(reg);
436    }
437
438    case Primitive::kPrimDouble: {
439      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
440      DCHECK_EQ(reg % 2, 0);
441      return Location::FpuRegisterPairLocation(reg, reg + 1);
442    }
443
444    case Primitive::kPrimVoid:
445      LOG(FATAL) << "Unreachable type " << type;
446  }
447
448  return Location();
449}
450
451void CodeGeneratorARM::SetupBlockedRegisters() const {
452  // Don't allocate the R1_R2 pair used for Dalvik-style register-pair argument passing.
453  blocked_register_pairs_[R1_R2] = true;
454
455  // Stack register, LR and PC are always reserved.
456  blocked_core_registers_[SP] = true;
457  blocked_core_registers_[LR] = true;
458  blocked_core_registers_[PC] = true;
459
460  // Reserve thread register.
461  blocked_core_registers_[TR] = true;
462
463  // Reserve temp register.
464  blocked_core_registers_[IP] = true;
465
466  // TODO: We currently don't use Quick's callee saved registers.
467  // We always save and restore R6 and R7 to make sure we can use three
468  // register pairs for long operations.
469  blocked_core_registers_[R4] = true;
470  blocked_core_registers_[R5] = true;
471  blocked_core_registers_[R8] = true;
472  blocked_core_registers_[R10] = true;
473  blocked_core_registers_[R11] = true;
474
475  blocked_fpu_registers_[S16] = true;
476  blocked_fpu_registers_[S17] = true;
477  blocked_fpu_registers_[S18] = true;
478  blocked_fpu_registers_[S19] = true;
479  blocked_fpu_registers_[S20] = true;
480  blocked_fpu_registers_[S21] = true;
481  blocked_fpu_registers_[S22] = true;
482  blocked_fpu_registers_[S23] = true;
483  blocked_fpu_registers_[S24] = true;
484  blocked_fpu_registers_[S25] = true;
485  blocked_fpu_registers_[S26] = true;
486  blocked_fpu_registers_[S27] = true;
487  blocked_fpu_registers_[S28] = true;
488  blocked_fpu_registers_[S29] = true;
489  blocked_fpu_registers_[S30] = true;
490  blocked_fpu_registers_[S31] = true;
491
492  UpdateBlockedPairRegisters();
493}
494
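// Marks a register pair as blocked whenever either of its halves is blocked.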
495void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
496  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
497    ArmManagedRegister current =
498        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
499    if (blocked_core_registers_[current.AsRegisterPairLow()]
500        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
501      blocked_register_pairs_[i] = true;
502    }
503  }
504}
505
506InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
507      : HGraphVisitor(graph),
508        assembler_(codegen->GetAssembler()),
509        codegen_(codegen) {}
510
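// Method prologue: optional stack overflow check (an implicit probe below SP, or an
// explicit comparison against Thread::StackEnd with a slow path), push of
// {R6, R7, LR}, allocation of the rest of the frame, and a store of the current
// ArtMethod* (passed in R0) at [SP, #0].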
511void CodeGeneratorARM::GenerateFrameEntry() {
512  bool skip_overflow_check =
513      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
514  if (!skip_overflow_check) {
515    if (GetCompilerOptions().GetImplicitStackOverflowChecks()) {
516      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
517      __ LoadFromOffset(kLoadWord, IP, IP, 0);
518      RecordPcInfo(nullptr, 0);
519    } else {
520      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
521      AddSlowPath(slow_path);
522
523      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
524      __ cmp(SP, ShifterOperand(IP));
525      __ b(slow_path->GetEntryLabel(), CC);
526    }
527  }
528
529  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
530  __ PushList(1 << LR | 1 << R6 | 1 << R7);
531
532  // The return PC has already been pushed on the stack.
533  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
534  __ StoreToOffset(kStoreWord, R0, SP, 0);
535}
536
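// Method epilogue: releases the frame and returns by popping the saved LR into PC,
// together with R6 and R7.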
537void CodeGeneratorARM::GenerateFrameExit() {
538  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
539  __ PopList(1 << PC | 1 << R6 | 1 << R7);
540}
541
542void CodeGeneratorARM::Bind(HBasicBlock* block) {
543  __ Bind(GetLabelOf(block));
544}
545
546Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
547  switch (load->GetType()) {
548    case Primitive::kPrimLong:
549    case Primitive::kPrimDouble:
550      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
551      break;
552
553    case Primitive::kPrimInt:
554    case Primitive::kPrimNot:
555    case Primitive::kPrimFloat:
556      return Location::StackSlot(GetStackSlot(load->GetLocal()));
557
558    case Primitive::kPrimBoolean:
559    case Primitive::kPrimByte:
560    case Primitive::kPrimChar:
561    case Primitive::kPrimShort:
562    case Primitive::kPrimVoid:
563      LOG(FATAL) << "Unexpected type " << load->GetType();
564  }
565
566  LOG(FATAL) << "Unreachable";
567  return Location();
568}
569
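// Assigns a location to the next method argument under the ARM managed calling
// convention: integral arguments use the remaining core argument registers (a long
// takes an aligned pair and never starts at R1, so R2_R3 is used instead), FP
// arguments use S registers (doubles starting at an even S register), and anything
// that does not fit in registers goes on the stack.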
570Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
571  switch (type) {
572    case Primitive::kPrimBoolean:
573    case Primitive::kPrimByte:
574    case Primitive::kPrimChar:
575    case Primitive::kPrimShort:
576    case Primitive::kPrimInt:
577    case Primitive::kPrimNot: {
578      uint32_t index = gp_index_++;
579      uint32_t stack_index = stack_index_++;
580      if (index < calling_convention.GetNumberOfRegisters()) {
581        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
582      } else {
583        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
584      }
585    }
586
587    case Primitive::kPrimLong: {
588      uint32_t index = gp_index_;
589      uint32_t stack_index = stack_index_;
590      gp_index_ += 2;
591      stack_index_ += 2;
592      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
593        if (calling_convention.GetRegisterAt(index) == R1) {
594          // Skip R1, and use R2_R3 instead.
595          gp_index_++;
596          index++;
597        }
598      }
599      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
600        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
601                  calling_convention.GetRegisterAt(index + 1));
602        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
603                                              calling_convention.GetRegisterAt(index + 1));
604      } else {
605        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
606      }
607    }
608
609    case Primitive::kPrimFloat: {
610      uint32_t stack_index = stack_index_++;
611      if (float_index_ % 2 == 0) {
612        float_index_ = std::max(double_index_, float_index_);
613      }
614      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
615        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
616      } else {
617        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
618      }
619    }
620
621    case Primitive::kPrimDouble: {
622      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
623      uint32_t stack_index = stack_index_;
624      stack_index_ += 2;
625      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
626        uint32_t index = double_index_;
627        double_index_ += 2;
628        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) + 1,
629                  calling_convention.GetFpuRegisterAt(index + 1));
630        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) & 1, 0);
631        return Location::FpuRegisterPairLocation(
632          calling_convention.GetFpuRegisterAt(index),
633          calling_convention.GetFpuRegisterAt(index + 1));
634      } else {
635        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
636      }
637    }
638
639    case Primitive::kPrimVoid:
640      LOG(FATAL) << "Unexpected parameter type " << type;
641      break;
642  }
643  return Location();
644}
645
646Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
647  switch (type) {
648    case Primitive::kPrimBoolean:
649    case Primitive::kPrimByte:
650    case Primitive::kPrimChar:
651    case Primitive::kPrimShort:
652    case Primitive::kPrimInt:
653    case Primitive::kPrimNot: {
654      return Location::RegisterLocation(R0);
655    }
656
657    case Primitive::kPrimFloat: {
658      return Location::FpuRegisterLocation(S0);
659    }
660
661    case Primitive::kPrimLong: {
662      return Location::RegisterPairLocation(R0, R1);
663    }
664
665    case Primitive::kPrimDouble: {
666      return Location::FpuRegisterPairLocation(S0, S1);
667    }
668
669    case Primitive::kPrimVoid:
670      return Location();
671  }
672  UNREACHABLE();
673  return Location();
674}
675
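// Moves a 32-bit value between any combination of core register, S register and
// stack slot, using IP as a scratch register for stack-to-stack moves.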
676void CodeGeneratorARM::Move32(Location destination, Location source) {
677  if (source.Equals(destination)) {
678    return;
679  }
680  if (destination.IsRegister()) {
681    if (source.IsRegister()) {
682      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
683    } else if (source.IsFpuRegister()) {
684      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
685    } else {
686      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
687    }
688  } else if (destination.IsFpuRegister()) {
689    if (source.IsRegister()) {
690      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
691    } else if (source.IsFpuRegister()) {
692      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
693    } else {
694      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
695    }
696  } else {
697    DCHECK(destination.IsStackSlot()) << destination;
698    if (source.IsRegister()) {
699      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
700    } else if (source.IsFpuRegister()) {
701      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
702    } else {
703      DCHECK(source.IsStackSlot()) << source;
704      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
705      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
706    }
707  }
708}
709
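// Moves a 64-bit value between register pairs, D registers (addressed through
// their low S register) and double stack slots. Pair-to-pair moves go through
// EmitParallelMoves so that overlapping registers are handled correctly.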
710void CodeGeneratorARM::Move64(Location destination, Location source) {
711  if (source.Equals(destination)) {
712    return;
713  }
714  if (destination.IsRegisterPair()) {
715    if (source.IsRegisterPair()) {
716      EmitParallelMoves(
717          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
718          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
719          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
720          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
721    } else if (source.IsFpuRegister()) {
722      UNIMPLEMENTED(FATAL);
723    } else {
724      // No conflict possible, so just do the moves.
725      DCHECK(source.IsDoubleStackSlot());
726      if (destination.AsRegisterPairLow<Register>() == R1) {
727        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
728        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
729        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
730      } else {
731        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
732                          SP, source.GetStackIndex());
733      }
734    }
735  } else if (destination.IsFpuRegisterPair()) {
736    if (source.IsDoubleStackSlot()) {
737      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
738                         SP,
739                         source.GetStackIndex());
740    } else {
741      UNIMPLEMENTED(FATAL);
742    }
743  } else {
744    DCHECK(destination.IsDoubleStackSlot());
745    if (source.IsRegisterPair()) {
746      // No conflict possible, so just do the moves.
747      if (source.AsRegisterPairLow<Register>() == R1) {
748        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
749        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
750        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
751      } else {
752        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
753                         SP, destination.GetStackIndex());
754      }
755    } else if (source.IsFpuRegisterPair()) {
756      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
757                        SP,
758                        destination.GetStackIndex());
759    } else {
760      DCHECK(source.IsDoubleStackSlot());
761      EmitParallelMoves(
762          Location::StackSlot(source.GetStackIndex()),
763          Location::StackSlot(destination.GetStackIndex()),
764          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
765          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
766    }
767  }
768}
769
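// Materializes the value needed by `move_for` into `location`: constants are
// loaded as immediates, locals are copied from their stack slot, temporaries from
// their temporary location, and any other instruction from its output location.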
770void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
771  LocationSummary* locations = instruction->GetLocations();
772  if (locations != nullptr && locations->Out().Equals(location)) {
773    return;
774  }
775
776  if (locations != nullptr && locations->Out().IsConstant()) {
777    HConstant* const_to_move = locations->Out().GetConstant();
778    if (const_to_move->IsIntConstant()) {
779      int32_t value = const_to_move->AsIntConstant()->GetValue();
780      if (location.IsRegister()) {
781        __ LoadImmediate(location.AsRegister<Register>(), value);
782      } else {
783        DCHECK(location.IsStackSlot());
784        __ LoadImmediate(IP, value);
785        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
786      }
787    } else {
788      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
789      int64_t value = const_to_move->AsLongConstant()->GetValue();
790      if (location.IsRegisterPair()) {
791        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
792        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
793      } else {
794        DCHECK(location.IsDoubleStackSlot());
795        __ LoadImmediate(IP, Low32Bits(value));
796        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
797        __ LoadImmediate(IP, High32Bits(value));
798        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
799      }
800    }
801  } else if (instruction->IsLoadLocal()) {
802    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
803    switch (instruction->GetType()) {
804      case Primitive::kPrimBoolean:
805      case Primitive::kPrimByte:
806      case Primitive::kPrimChar:
807      case Primitive::kPrimShort:
808      case Primitive::kPrimInt:
809      case Primitive::kPrimNot:
810      case Primitive::kPrimFloat:
811        Move32(location, Location::StackSlot(stack_slot));
812        break;
813
814      case Primitive::kPrimLong:
815      case Primitive::kPrimDouble:
816        Move64(location, Location::DoubleStackSlot(stack_slot));
817        break;
818
819      default:
820        LOG(FATAL) << "Unexpected type " << instruction->GetType();
821    }
822  } else if (instruction->IsTemporary()) {
823    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
824    if (temp_location.IsStackSlot()) {
825      Move32(location, temp_location);
826    } else {
827      DCHECK(temp_location.IsDoubleStackSlot());
828      Move64(location, temp_location);
829    }
830  } else {
831    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
832    switch (instruction->GetType()) {
833      case Primitive::kPrimBoolean:
834      case Primitive::kPrimByte:
835      case Primitive::kPrimChar:
836      case Primitive::kPrimShort:
837      case Primitive::kPrimNot:
838      case Primitive::kPrimInt:
839      case Primitive::kPrimFloat:
840        Move32(location, locations->Out());
841        break;
842
843      case Primitive::kPrimLong:
844      case Primitive::kPrimDouble:
845        Move64(location, locations->Out());
846        break;
847
848      default:
849        LOG(FATAL) << "Unexpected type " << instruction->GetType();
850    }
851  }
852}
853
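// Calls a runtime entrypoint: loads its address from the Thread (TR) entrypoint
// table into LR, branches with blx, and records the PC information for the call.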
854void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
855                                     HInstruction* instruction,
856                                     uint32_t dex_pc) {
857  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
858  __ blx(LR);
859  RecordPcInfo(instruction, dex_pc);
860  DCHECK(instruction->IsSuspendCheck()
861      || instruction->IsBoundsCheck()
862      || instruction->IsNullCheck()
863      || instruction->IsDivZeroCheck()
864      || instruction->GetLocations()->CanCall()
865      || !IsLeafMethod());
866}
867
868void LocationsBuilderARM::VisitGoto(HGoto* got) {
869  got->SetLocations(nullptr);
870}
871
872void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
873  HBasicBlock* successor = got->GetSuccessor();
874  DCHECK(!successor->IsExitBlock());
875
876  HBasicBlock* block = got->GetBlock();
877  HInstruction* previous = got->GetPrevious();
878
879  HLoopInformation* info = block->GetLoopInformation();
880  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
881    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
882    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
883    return;
884  }
885
886  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
887    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
888  }
889  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
890    __ b(codegen_->GetLabelOf(successor));
891  }
892}
893
894void LocationsBuilderARM::VisitExit(HExit* exit) {
895  exit->SetLocations(nullptr);
896}
897
898void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
899  UNUSED(exit);
900  if (kIsDebugBuild) {
901    __ Comment("Unreachable");
902    __ bkpt(0);
903  }
904}
905
906void LocationsBuilderARM::VisitIf(HIf* if_instr) {
907  LocationSummary* locations =
908      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
909  HInstruction* cond = if_instr->InputAt(0);
910  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
911    locations->SetInAt(0, Location::RequiresRegister());
912  }
913}
914
915void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
916  HInstruction* cond = if_instr->InputAt(0);
917  if (cond->IsIntConstant()) {
918    // Constant condition, statically compared against 1.
919    int32_t cond_value = cond->AsIntConstant()->GetValue();
920    if (cond_value == 1) {
921      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
922                                     if_instr->IfTrueSuccessor())) {
923        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
924      }
925      return;
926    } else {
927      DCHECK_EQ(cond_value, 0);
928    }
929  } else {
930    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
931      // The condition has been materialized; compare its output against 0.
932      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
933      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
934             ShifterOperand(0));
935      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
936    } else {
937      // The condition has not been materialized; use its inputs for the
938      // comparison and its condition code for the branch.
939      LocationSummary* locations = cond->GetLocations();
940      Register left = locations->InAt(0).AsRegister<Register>();
941      if (locations->InAt(1).IsRegister()) {
942        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
943      } else {
944        DCHECK(locations->InAt(1).IsConstant());
945        int32_t value =
946            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
947        ShifterOperand operand;
948        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
949          __ cmp(left, operand);
950        } else {
951          Register temp = IP;
952          __ LoadImmediate(temp, value);
953          __ cmp(left, ShifterOperand(temp));
954        }
955      }
956      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
957           ARMCondition(cond->AsCondition()->GetCondition()));
958    }
959  }
960  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
961                                 if_instr->IfFalseSuccessor())) {
962    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
963  }
964}
965
966
967void LocationsBuilderARM::VisitCondition(HCondition* comp) {
968  LocationSummary* locations =
969      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
970  locations->SetInAt(0, Location::RequiresRegister());
971  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
972  if (comp->NeedsMaterialization()) {
973    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
974  }
975}
976
977void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
978  if (!comp->NeedsMaterialization()) return;
979  LocationSummary* locations = comp->GetLocations();
980  Register left = locations->InAt(0).AsRegister<Register>();
981
982  if (locations->InAt(1).IsRegister()) {
983    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
984  } else {
985    DCHECK(locations->InAt(1).IsConstant());
986    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
987    ShifterOperand operand;
988    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
989      __ cmp(left, operand);
990    } else {
991      Register temp = IP;
992      __ LoadImmediate(temp, value);
993      __ cmp(left, ShifterOperand(temp));
994    }
995  }
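  // Materialize the condition into the output register: 1 if it holds, 0 otherwise.
  // In Thumb-2 this is roughly: ite <cond>; mov<cond> out, #1; mov<opposite> out, #0.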
996  __ it(ARMCondition(comp->GetCondition()), kItElse);
997  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
998         ARMCondition(comp->GetCondition()));
999  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
1000         ARMOppositeCondition(comp->GetCondition()));
1001}
1002
1003void LocationsBuilderARM::VisitEqual(HEqual* comp) {
1004  VisitCondition(comp);
1005}
1006
1007void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
1008  VisitCondition(comp);
1009}
1010
1011void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1012  VisitCondition(comp);
1013}
1014
1015void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1016  VisitCondition(comp);
1017}
1018
1019void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1020  VisitCondition(comp);
1021}
1022
1023void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1024  VisitCondition(comp);
1025}
1026
1027void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1028  VisitCondition(comp);
1029}
1030
1031void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1032  VisitCondition(comp);
1033}
1034
1035void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1036  VisitCondition(comp);
1037}
1038
1039void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1040  VisitCondition(comp);
1041}
1042
1043void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1044  VisitCondition(comp);
1045}
1046
1047void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1048  VisitCondition(comp);
1049}
1050
1051void LocationsBuilderARM::VisitLocal(HLocal* local) {
1052  local->SetLocations(nullptr);
1053}
1054
1055void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1056  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1057}
1058
1059void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1060  load->SetLocations(nullptr);
1061}
1062
1063void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1064  // Nothing to do, this is driven by the code generator.
1065  UNUSED(load);
1066}
1067
1068void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1069  LocationSummary* locations =
1070      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1071  switch (store->InputAt(1)->GetType()) {
1072    case Primitive::kPrimBoolean:
1073    case Primitive::kPrimByte:
1074    case Primitive::kPrimChar:
1075    case Primitive::kPrimShort:
1076    case Primitive::kPrimInt:
1077    case Primitive::kPrimNot:
1078    case Primitive::kPrimFloat:
1079      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1080      break;
1081
1082    case Primitive::kPrimLong:
1083    case Primitive::kPrimDouble:
1084      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1085      break;
1086
1087    default:
1088      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1089  }
1090}
1091
1092void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1093  UNUSED(store);
1094}
1095
1096void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1097  LocationSummary* locations =
1098      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1099  locations->SetOut(Location::ConstantLocation(constant));
1100}
1101
1102void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1103  // Will be generated at use site.
1104  UNUSED(constant);
1105}
1106
1107void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1108  LocationSummary* locations =
1109      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1110  locations->SetOut(Location::ConstantLocation(constant));
1111}
1112
1113void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1114  // Will be generated at use site.
1115  UNUSED(constant);
1116}
1117
1118void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1119  LocationSummary* locations =
1120      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1121  locations->SetOut(Location::ConstantLocation(constant));
1122}
1123
1124void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1125  // Will be generated at use site.
1126  UNUSED(constant);
1127}
1128
1129void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1130  LocationSummary* locations =
1131      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1132  locations->SetOut(Location::ConstantLocation(constant));
1133}
1134
1135void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1136  // Will be generated at use site.
1137  UNUSED(constant);
1138}
1139
1140void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1141  ret->SetLocations(nullptr);
1142}
1143
1144void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1145  UNUSED(ret);
1146  codegen_->GenerateFrameExit();
1147}
1148
1149void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1150  LocationSummary* locations =
1151      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1152  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1153}
1154
1155void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1156  UNUSED(ret);
1157  codegen_->GenerateFrameExit();
1158}
1159
1160void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1161  HandleInvoke(invoke);
1162}
1163
1164void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1165  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1166}
1167
1168void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1169  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1170
1171  // TODO: Implement all kinds of calls:
1172  // 1) boot -> boot
1173  // 2) app -> boot
1174  // 3) app -> app
1175  //
1176  // Currently we implement only the app -> app case, which looks the callee up in the dex cache of resolved methods.
1177
1178  // temp = method;
1179  codegen_->LoadCurrentMethod(temp);
1180  // temp = temp->dex_cache_resolved_methods_;
1181  __ LoadFromOffset(
1182      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1183  // temp = temp[index_in_cache]
1184  __ LoadFromOffset(
1185      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
1186  // LR = temp[offset_of_quick_compiled_code]
1187  __ LoadFromOffset(kLoadWord, LR, temp,
1188                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1189                         kArmWordSize).Int32Value());
1190  // LR()
1191  __ blx(LR);
1192
1193  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1194  DCHECK(!codegen_->IsLeafMethod());
1195}
1196
1197void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1198  LocationSummary* locations =
1199      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1200  locations->AddTemp(Location::RegisterLocation(R0));
1201
1202  InvokeDexCallingConventionVisitor calling_convention_visitor;
1203  for (size_t i = 0; i < invoke->InputCount(); i++) {
1204    HInstruction* input = invoke->InputAt(i);
1205    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1206  }
1207
1208  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1209}
1210
1211void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1212  HandleInvoke(invoke);
1213}
1214
1215void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1216  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1217  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1218          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1219  LocationSummary* locations = invoke->GetLocations();
1220  Location receiver = locations->InAt(0);
1221  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1222  // temp = object->GetClass();
1223  if (receiver.IsStackSlot()) {
1224    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1225    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1226  } else {
1227    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1228  }
1229  codegen_->MaybeRecordImplicitNullCheck(invoke);
1230  // temp = temp->GetMethodAt(method_offset);
1231  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1232      kArmWordSize).Int32Value();
1233  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1234  // LR = temp->GetEntryPoint();
1235  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1236  // LR();
1237  __ blx(LR);
1238  DCHECK(!codegen_->IsLeafMethod());
1239  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1240}
1241
1242void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1243  HandleInvoke(invoke);
1244  // Add the hidden argument.
1245  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1246}
1247
1248void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1249  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1250  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1251  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1252          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1253  LocationSummary* locations = invoke->GetLocations();
1254  Location receiver = locations->InAt(0);
1255  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1256
1257  // Set the hidden argument.
1258  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
1259                   invoke->GetDexMethodIndex());
1260
1261  // temp = object->GetClass();
1262  if (receiver.IsStackSlot()) {
1263    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1264    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1265  } else {
1266    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1267  }
1268  codegen_->MaybeRecordImplicitNullCheck(invoke);
1269  // temp = temp->GetImtEntryAt(method_offset);
1270  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1271      kArmWordSize).Int32Value();
1272  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1273  // LR = temp->GetEntryPoint();
1274  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1275  // LR();
1276  __ blx(LR);
1277  DCHECK(!codegen_->IsLeafMethod());
1278  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1279}
1280
1281void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1282  LocationSummary* locations =
1283      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1284  switch (neg->GetResultType()) {
1285    case Primitive::kPrimInt:
1286    case Primitive::kPrimLong: {
1287      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1288      locations->SetInAt(0, Location::RequiresRegister());
1289      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1290      break;
1291    }
1292
1293    case Primitive::kPrimFloat:
1294    case Primitive::kPrimDouble:
1295      locations->SetInAt(0, Location::RequiresFpuRegister());
1296      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1297      break;
1298
1299    default:
1300      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1301  }
1302}
1303
1304void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1305  LocationSummary* locations = neg->GetLocations();
1306  Location out = locations->Out();
1307  Location in = locations->InAt(0);
1308  switch (neg->GetResultType()) {
1309    case Primitive::kPrimInt:
1310      DCHECK(in.IsRegister());
1311      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
1312      break;
1313
1314    case Primitive::kPrimLong:
1315      DCHECK(in.IsRegisterPair());
1316      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1317      __ rsbs(out.AsRegisterPairLow<Register>(),
1318              in.AsRegisterPairLow<Register>(),
1319              ShifterOperand(0));
1320      // We cannot emit an RSC (Reverse Subtract with Carry)
1321      // instruction here, as it does not exist in the Thumb-2
1322      // instruction set. We use an SBC and a SUB instead:
1323      //
1324      // out.hi = 0 - 0 - !C, i.e. -1 if the low-word subtraction
1325      // borrowed (carry flag clear), 0 otherwise.
1326      __ sbc(out.AsRegisterPairHigh<Register>(),
1327             out.AsRegisterPairHigh<Register>(),
1328             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1329      // out.hi = out.hi - in.hi
1330      __ sub(out.AsRegisterPairHigh<Register>(),
1331             out.AsRegisterPairHigh<Register>(),
1332             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1333      break;
1334
1335    case Primitive::kPrimFloat:
1336      DCHECK(in.IsFpuRegister());
1337      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
1338      break;
1339
1340    case Primitive::kPrimDouble:
1341      DCHECK(in.IsFpuRegisterPair());
1342      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1343               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1344      break;
1345
1346    default:
1347      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1348  }
1349}
1350
1351void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1352  Primitive::Type result_type = conversion->GetResultType();
1353  Primitive::Type input_type = conversion->GetInputType();
1354  DCHECK_NE(result_type, input_type);
1355
1356  // The float-to-long and double-to-long type conversions rely on a
1357  // call to the runtime.
1358  LocationSummary::CallKind call_kind =
1359      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1360       && result_type == Primitive::kPrimLong)
1361      ? LocationSummary::kCall
1362      : LocationSummary::kNoCall;
1363  LocationSummary* locations =
1364      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1365
1366  switch (result_type) {
1367    case Primitive::kPrimByte:
1368      switch (input_type) {
1369        case Primitive::kPrimShort:
1370        case Primitive::kPrimInt:
1371        case Primitive::kPrimChar:
1372          // Processing a Dex `int-to-byte' instruction.
1373          locations->SetInAt(0, Location::RequiresRegister());
1374          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1375          break;
1376
1377        default:
1378          LOG(FATAL) << "Unexpected type conversion from " << input_type
1379                     << " to " << result_type;
1380      }
1381      break;
1382
1383    case Primitive::kPrimShort:
1384      switch (input_type) {
1385        case Primitive::kPrimByte:
1386        case Primitive::kPrimInt:
1387        case Primitive::kPrimChar:
1388          // Processing a Dex `int-to-short' instruction.
1389          locations->SetInAt(0, Location::RequiresRegister());
1390          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1391          break;
1392
1393        default:
1394          LOG(FATAL) << "Unexpected type conversion from " << input_type
1395                     << " to " << result_type;
1396      }
1397      break;
1398
1399    case Primitive::kPrimInt:
1400      switch (input_type) {
1401        case Primitive::kPrimLong:
1402          // Processing a Dex `long-to-int' instruction.
1403          locations->SetInAt(0, Location::Any());
1404          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1405          break;
1406
1407        case Primitive::kPrimFloat:
1408          // Processing a Dex `float-to-int' instruction.
1409          locations->SetInAt(0, Location::RequiresFpuRegister());
1410          locations->SetOut(Location::RequiresRegister());
1411          locations->AddTemp(Location::RequiresFpuRegister());
1412          break;
1413
1414        case Primitive::kPrimDouble:
1415          // Processing a Dex `double-to-int' instruction.
1416          locations->SetInAt(0, Location::RequiresFpuRegister());
1417          locations->SetOut(Location::RequiresRegister());
1418          locations->AddTemp(Location::RequiresFpuRegister());
1419          break;
1420
1421        default:
1422          LOG(FATAL) << "Unexpected type conversion from " << input_type
1423                     << " to " << result_type;
1424      }
1425      break;
1426
1427    case Primitive::kPrimLong:
1428      switch (input_type) {
1429        case Primitive::kPrimByte:
1430        case Primitive::kPrimShort:
1431        case Primitive::kPrimInt:
1432        case Primitive::kPrimChar:
1433          // Processing a Dex `int-to-long' instruction.
1434          locations->SetInAt(0, Location::RequiresRegister());
1435          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1436          break;
1437
1438        case Primitive::kPrimFloat: {
1439          // Processing a Dex `float-to-long' instruction.
1440          InvokeRuntimeCallingConvention calling_convention;
1441          locations->SetInAt(0, Location::FpuRegisterLocation(
1442              calling_convention.GetFpuRegisterAt(0)));
1443          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1444          break;
1445        }
1446
1447        case Primitive::kPrimDouble: {
1448          // Processing a Dex `double-to-long' instruction.
1449          InvokeRuntimeCallingConvention calling_convention;
1450          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1451              calling_convention.GetFpuRegisterAt(0),
1452              calling_convention.GetFpuRegisterAt(1)));
1453          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1454          break;
1455        }
1456
1457        default:
1458          LOG(FATAL) << "Unexpected type conversion from " << input_type
1459                     << " to " << result_type;
1460      }
1461      break;
1462
1463    case Primitive::kPrimChar:
1464      switch (input_type) {
1465        case Primitive::kPrimByte:
1466        case Primitive::kPrimShort:
1467        case Primitive::kPrimInt:
1468          // Processing a Dex `int-to-char' instruction.
1469          locations->SetInAt(0, Location::RequiresRegister());
1470          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1471          break;
1472
1473        default:
1474          LOG(FATAL) << "Unexpected type conversion from " << input_type
1475                     << " to " << result_type;
1476      }
1477      break;
1478
1479    case Primitive::kPrimFloat:
1480      switch (input_type) {
1481        case Primitive::kPrimByte:
1482        case Primitive::kPrimShort:
1483        case Primitive::kPrimInt:
1484        case Primitive::kPrimChar:
1485          // Processing a Dex `int-to-float' instruction.
1486          locations->SetInAt(0, Location::RequiresRegister());
1487          locations->SetOut(Location::RequiresFpuRegister());
1488          break;
1489
1490        case Primitive::kPrimLong:
1491          // Processing a Dex `long-to-float' instruction.
1492          locations->SetInAt(0, Location::RequiresRegister());
1493          locations->SetOut(Location::RequiresFpuRegister());
1494          locations->AddTemp(Location::RequiresRegister());
1495          locations->AddTemp(Location::RequiresRegister());
1496          locations->AddTemp(Location::RequiresFpuRegister());
1497          locations->AddTemp(Location::RequiresFpuRegister());
1498          break;
1499
1500        case Primitive::kPrimDouble:
1501          // Processing a Dex `double-to-float' instruction.
1502          locations->SetInAt(0, Location::RequiresFpuRegister());
1503          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1504          break;
1505
1506        default:
1507          LOG(FATAL) << "Unexpected type conversion from " << input_type
1508                     << " to " << result_type;
1509      }
1510      break;
1511
1512    case Primitive::kPrimDouble:
1513      switch (input_type) {
1514        case Primitive::kPrimByte:
1515        case Primitive::kPrimShort:
1516        case Primitive::kPrimInt:
1517        case Primitive::kPrimChar:
1518          // Processing a Dex `int-to-double' instruction.
1519          locations->SetInAt(0, Location::RequiresRegister());
1520          locations->SetOut(Location::RequiresFpuRegister());
1521          break;
1522
1523        case Primitive::kPrimLong:
1524          // Processing a Dex `long-to-double' instruction.
1525          locations->SetInAt(0, Location::RequiresRegister());
1526          locations->SetOut(Location::RequiresFpuRegister());
1527          locations->AddTemp(Location::RequiresRegister());
1528          locations->AddTemp(Location::RequiresRegister());
1529          locations->AddTemp(Location::RequiresFpuRegister());
1530          break;
1531
1532        case Primitive::kPrimFloat:
1533          // Processing a Dex `float-to-double' instruction.
1534          locations->SetInAt(0, Location::RequiresFpuRegister());
1535          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1536          break;
1537
1538        default:
1539          LOG(FATAL) << "Unexpected type conversion from " << input_type
1540                     << " to " << result_type;
1541      }
1542      break;
1543
1544    default:
1545      LOG(FATAL) << "Unexpected type conversion from " << input_type
1546                 << " to " << result_type;
1547  }
1548}
1549
1550void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1551  LocationSummary* locations = conversion->GetLocations();
1552  Location out = locations->Out();
1553  Location in = locations->InAt(0);
1554  Primitive::Type result_type = conversion->GetResultType();
1555  Primitive::Type input_type = conversion->GetInputType();
1556  DCHECK_NE(result_type, input_type);
1557  switch (result_type) {
1558    case Primitive::kPrimByte:
1559      switch (input_type) {
1560        case Primitive::kPrimShort:
1561        case Primitive::kPrimInt:
1562        case Primitive::kPrimChar:
1563          // Processing a Dex `int-to-byte' instruction.
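          // SBFX extracts the low 8 bits and sign-extends them into the whole
          // register, which is exactly the Java int-to-byte narrowing.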
1564          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1565          break;
1566
1567        default:
1568          LOG(FATAL) << "Unexpected type conversion from " << input_type
1569                     << " to " << result_type;
1570      }
1571      break;
1572
1573    case Primitive::kPrimShort:
1574      switch (input_type) {
1575        case Primitive::kPrimByte:
1576        case Primitive::kPrimInt:
1577        case Primitive::kPrimChar:
1578          // Processing a Dex `int-to-short' instruction.
1579          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1580          break;
1581
1582        default:
1583          LOG(FATAL) << "Unexpected type conversion from " << input_type
1584                     << " to " << result_type;
1585      }
1586      break;
1587
1588    case Primitive::kPrimInt:
1589      switch (input_type) {
1590        case Primitive::kPrimLong:
1591          // Processing a Dex `long-to-int' instruction.
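          // Only the low 32 bits survive the conversion; the input may live in
          // a register pair, in a double stack slot, or be a constant.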
1592          DCHECK(out.IsRegister());
1593          if (in.IsRegisterPair()) {
1594            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1595          } else if (in.IsDoubleStackSlot()) {
1596            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1597          } else {
1598            DCHECK(in.IsConstant());
1599            DCHECK(in.GetConstant()->IsLongConstant());
1600            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1601            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1602          }
1603          break;
1604
1605        case Primitive::kPrimFloat: {
1606          // Processing a Dex `float-to-int' instruction.
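          // VCVT writes its integer result to an S register, so the value is
          // converted in an FP temp (rounding toward zero) and then moved to
          // the core output register.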
1607          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1608          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1609          __ vcvtis(temp, temp);
1610          __ vmovrs(out.AsRegister<Register>(), temp);
1611          break;
1612        }
1613
1614        case Primitive::kPrimDouble: {
1615          // Processing a Dex `double-to-int' instruction.
1616          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1617          DRegister temp_d = FromLowSToD(temp_s);
1618          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1619          __ vcvtid(temp_s, temp_d);
1620          __ vmovrs(out.AsRegister<Register>(), temp_s);
1621          break;
1622        }
1623
1624        default:
1625          LOG(FATAL) << "Unexpected type conversion from " << input_type
1626                     << " to " << result_type;
1627      }
1628      break;
1629
1630    case Primitive::kPrimLong:
1631      switch (input_type) {
1632        case Primitive::kPrimByte:
1633        case Primitive::kPrimShort:
1634        case Primitive::kPrimInt:
1635        case Primitive::kPrimChar:
1636          // Processing a Dex `int-to-long' instruction.
1637          DCHECK(out.IsRegisterPair());
1638          DCHECK(in.IsRegister());
1639          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1640          // Sign extension.
1641          __ Asr(out.AsRegisterPairHigh<Register>(),
1642                 out.AsRegisterPairLow<Register>(),
1643                 31);
1644          break;
1645
1646        case Primitive::kPrimFloat:
1647          // Processing a Dex `float-to-long' instruction.
1648          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1649                                  conversion,
1650                                  conversion->GetDexPc());
1651          break;
1652
1653        case Primitive::kPrimDouble:
1654          // Processing a Dex `double-to-long' instruction.
1655          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1656                                  conversion,
1657                                  conversion->GetDexPc());
1658          break;
1659
1660        default:
1661          LOG(FATAL) << "Unexpected type conversion from " << input_type
1662                     << " to " << result_type;
1663      }
1664      break;
1665
1666    case Primitive::kPrimChar:
1667      switch (input_type) {
1668        case Primitive::kPrimByte:
1669        case Primitive::kPrimShort:
1670        case Primitive::kPrimInt:
1671          // Processing a Dex `int-to-char' instruction.
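          // UBFX zero-extends the low 16 bits, matching the unsigned char type.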
1672          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1673          break;
1674
1675        default:
1676          LOG(FATAL) << "Unexpected type conversion from " << input_type
1677                     << " to " << result_type;
1678      }
1679      break;
1680
1681    case Primitive::kPrimFloat:
1682      switch (input_type) {
1683        case Primitive::kPrimByte:
1684        case Primitive::kPrimShort:
1685        case Primitive::kPrimInt:
1686        case Primitive::kPrimChar: {
1687          // Processing a Dex `int-to-float' instruction.
1688          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1689          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1690          break;
1691        }
1692
1693        case Primitive::kPrimLong: {
1694          // Processing a Dex `long-to-float' instruction.
1695          Register low = in.AsRegisterPairLow<Register>();
1696          Register high = in.AsRegisterPairHigh<Register>();
1697          SRegister output = out.AsFpuRegister<SRegister>();
1698          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1699          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1700          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1701          DRegister temp1_d = FromLowSToD(temp1_s);
1702          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1703          DRegister temp2_d = FromLowSToD(temp2_s);
1704
1705          // Operations use doubles for precision reasons (each 32-bit
1706          // half of a long fits in the 53-bit mantissa of a double,
1707          // but not in the 24-bit mantissa of a float).  This is
1708          // especially important for the low bits.  The result is
1709          // eventually converted to float.
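          // In other words:
          //   result = (float) ((double) (int32_t) high * 2^32 + (double) (uint32_t) low)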
1710
1711          // temp1_d = int-to-double(high)
1712          __ vmovsr(temp1_s, high);
1713          __ vcvtdi(temp1_d, temp1_s);
1714          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1715          // as an immediate value into `temp2_d` does not work, as
1716          // this instruction only transfers 8 significant bits of its
1717          // immediate operand.  Instead, use two 32-bit core
1718          // registers to load `k2Pow32EncodingForDouble` into
1719          // `temp2_d`.
1720          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1721          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1722          __ vmovdrr(temp2_d, constant_low, constant_high);
1723          // temp1_d = temp1_d * 2^32
1724          __ vmuld(temp1_d, temp1_d, temp2_d);
1725          // temp2_d = unsigned-to-double(low)
1726          __ vmovsr(temp2_s, low);
1727          __ vcvtdu(temp2_d, temp2_s);
1728          // temp1_d = temp1_d + temp2_d
1729          __ vaddd(temp1_d, temp1_d, temp2_d);
1730          // output = double-to-float(temp1_d);
1731          __ vcvtsd(output, temp1_d);
1732          break;
1733        }
1734
1735        case Primitive::kPrimDouble:
1736          // Processing a Dex `double-to-float' instruction.
1737          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1738                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1739          break;
1740
1741        default:
1742          LOG(FATAL) << "Unexpected type conversion from " << input_type
1743                     << " to " << result_type;
1744      }
1745      break;
1746
1747    case Primitive::kPrimDouble:
1748      switch (input_type) {
1749        case Primitive::kPrimByte:
1750        case Primitive::kPrimShort:
1751        case Primitive::kPrimInt:
1752        case Primitive::kPrimChar: {
1753          // Processing a Dex `int-to-double' instruction.
1754          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1755          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1756                    out.AsFpuRegisterPairLow<SRegister>());
1757          break;
1758        }
1759
1760        case Primitive::kPrimLong: {
1761          // Processing a Dex `long-to-double' instruction.
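          // Same decomposition as the `long-to-float' case above, except that
          // the result stays in double precision.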
1762          Register low = in.AsRegisterPairLow<Register>();
1763          Register high = in.AsRegisterPairHigh<Register>();
1764          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1765          DRegister out_d = FromLowSToD(out_s);
1766          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1767          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1768          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1769          DRegister temp_d = FromLowSToD(temp_s);
1770
1771          // out_d = int-to-double(high)
1772          __ vmovsr(out_s, high);
1773          __ vcvtdi(out_d, out_s);
1774          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1775          // as an immediate value into `temp_d` does not work, as
1776          // this instruction only transfers 8 significant bits of its
1777          // immediate operand.  Instead, use two 32-bit core
1778          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1779          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1780          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1781          __ vmovdrr(temp_d, constant_low, constant_high);
1782          // out_d = out_d * 2^32
1783          __ vmuld(out_d, out_d, temp_d);
1784          // temp_d = unsigned-to-double(low)
1785          __ vmovsr(temp_s, low);
1786          __ vcvtdu(temp_d, temp_s);
1787          // out_d = out_d + temp_d
1788          __ vaddd(out_d, out_d, temp_d);
1789          break;
1790        }
1791
1792        case Primitive::kPrimFloat:
1793          // Processing a Dex `float-to-double' instruction.
1794          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1795                    in.AsFpuRegister<SRegister>());
1796          break;
1797
1798        default:
1799          LOG(FATAL) << "Unexpected type conversion from " << input_type
1800                     << " to " << result_type;
1801      }
1802      break;
1803
1804    default:
1805      LOG(FATAL) << "Unexpected type conversion from " << input_type
1806                 << " to " << result_type;
1807  }
1808}
1809
1810void LocationsBuilderARM::VisitAdd(HAdd* add) {
1811  LocationSummary* locations =
1812      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1813  switch (add->GetResultType()) {
1814    case Primitive::kPrimInt:
1815    case Primitive::kPrimLong: {
1816      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1817      locations->SetInAt(0, Location::RequiresRegister());
1818      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1819      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1820      break;
1821    }
1822
1823    case Primitive::kPrimFloat:
1824    case Primitive::kPrimDouble: {
1825      locations->SetInAt(0, Location::RequiresFpuRegister());
1826      locations->SetInAt(1, Location::RequiresFpuRegister());
1827      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1828      break;
1829    }
1830
1831    default:
1832      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1833  }
1834}
1835
1836void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1837  LocationSummary* locations = add->GetLocations();
1838  Location out = locations->Out();
1839  Location first = locations->InAt(0);
1840  Location second = locations->InAt(1);
1841  switch (add->GetResultType()) {
1842    case Primitive::kPrimInt:
1843      if (second.IsRegister()) {
1844        __ add(out.AsRegister<Register>(),
1845               first.AsRegister<Register>(),
1846               ShifterOperand(second.AsRegister<Register>()));
1847      } else {
1848        __ AddConstant(out.AsRegister<Register>(),
1849                       first.AsRegister<Register>(),
1850                       second.GetConstant()->AsIntConstant()->GetValue());
1851      }
1852      break;
1853
1854    case Primitive::kPrimLong:
1855      __ adds(out.AsRegisterPairLow<Register>(),
1856              first.AsRegisterPairLow<Register>(),
1857              ShifterOperand(second.AsRegisterPairLow<Register>()));
1858      __ adc(out.AsRegisterPairHigh<Register>(),
1859             first.AsRegisterPairHigh<Register>(),
1860             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1861      break;
1862
1863    case Primitive::kPrimFloat:
1864      __ vadds(out.AsFpuRegister<SRegister>(),
1865               first.AsFpuRegister<SRegister>(),
1866               second.AsFpuRegister<SRegister>());
1867      break;
1868
1869    case Primitive::kPrimDouble:
1870      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1871               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1872               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1873      break;
1874
1875    default:
1876      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1877  }
1878}
1879
1880void LocationsBuilderARM::VisitSub(HSub* sub) {
1881  LocationSummary* locations =
1882      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1883  switch (sub->GetResultType()) {
1884    case Primitive::kPrimInt:
1885    case Primitive::kPrimLong: {
1886      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1887      locations->SetInAt(0, Location::RequiresRegister());
1888      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1889      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1890      break;
1891    }
1892    case Primitive::kPrimFloat:
1893    case Primitive::kPrimDouble: {
1894      locations->SetInAt(0, Location::RequiresFpuRegister());
1895      locations->SetInAt(1, Location::RequiresFpuRegister());
1896      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1897      break;
1898    }
1899    default:
1900      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1901  }
1902}
1903
1904void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1905  LocationSummary* locations = sub->GetLocations();
1906  Location out = locations->Out();
1907  Location first = locations->InAt(0);
1908  Location second = locations->InAt(1);
1909  switch (sub->GetResultType()) {
1910    case Primitive::kPrimInt: {
1911      if (second.IsRegister()) {
1912        __ sub(out.AsRegister<Register>(),
1913               first.AsRegister<Register>(),
1914               ShifterOperand(second.AsRegister<Register>()));
1915      } else {
1916        __ AddConstant(out.AsRegister<Register>(),
1917                       first.AsRegister<Register>(),
1918                       -second.GetConstant()->AsIntConstant()->GetValue());
1919      }
1920      break;
1921    }
1922
1923    case Primitive::kPrimLong: {
1924      __ subs(out.AsRegisterPairLow<Register>(),
1925              first.AsRegisterPairLow<Register>(),
1926              ShifterOperand(second.AsRegisterPairLow<Register>()));
1927      __ sbc(out.AsRegisterPairHigh<Register>(),
1928             first.AsRegisterPairHigh<Register>(),
1929             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1930      break;
1931    }
1932
1933    case Primitive::kPrimFloat: {
1934      __ vsubs(out.AsFpuRegister<SRegister>(),
1935               first.AsFpuRegister<SRegister>(),
1936               second.AsFpuRegister<SRegister>());
1937      break;
1938    }
1939
1940    case Primitive::kPrimDouble: {
1941      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1942               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1943               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1944      break;
1945    }
1946
1947
1948    default:
1949      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1950  }
1951}
1952
1953void LocationsBuilderARM::VisitMul(HMul* mul) {
1954  LocationSummary* locations =
1955      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1956  switch (mul->GetResultType()) {
1957    case Primitive::kPrimInt:
1958    case Primitive::kPrimLong:  {
1959      locations->SetInAt(0, Location::RequiresRegister());
1960      locations->SetInAt(1, Location::RequiresRegister());
1961      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1962      break;
1963    }
1964
1965    case Primitive::kPrimFloat:
1966    case Primitive::kPrimDouble: {
1967      locations->SetInAt(0, Location::RequiresFpuRegister());
1968      locations->SetInAt(1, Location::RequiresFpuRegister());
1969      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1970      break;
1971    }
1972
1973    default:
1974      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1975  }
1976}
1977
1978void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1979  LocationSummary* locations = mul->GetLocations();
1980  Location out = locations->Out();
1981  Location first = locations->InAt(0);
1982  Location second = locations->InAt(1);
1983  switch (mul->GetResultType()) {
1984    case Primitive::kPrimInt: {
1985      __ mul(out.AsRegister<Register>(),
1986             first.AsRegister<Register>(),
1987             second.AsRegister<Register>());
1988      break;
1989    }
1990    case Primitive::kPrimLong: {
1991      Register out_hi = out.AsRegisterPairHigh<Register>();
1992      Register out_lo = out.AsRegisterPairLow<Register>();
1993      Register in1_hi = first.AsRegisterPairHigh<Register>();
1994      Register in1_lo = first.AsRegisterPairLow<Register>();
1995      Register in2_hi = second.AsRegisterPairHigh<Register>();
1996      Register in2_lo = second.AsRegisterPairLow<Register>();
1997
1998      // Extra checks needed because of the existence of the R1_R2 pair.
1999      // The algorithm is wrong if out.hi is either in1.lo or in2.lo
2000      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
2001      DCHECK_NE(out_hi, in1_lo);
2002      DCHECK_NE(out_hi, in2_lo);
2003
2004      // input: in1 - 64 bits, in2 - 64 bits
2005      // output: out
2006      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2007      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2008      // parts: out.lo = (in1.lo * in2.lo)[31:0]
2009
2010      // IP <- in1.lo * in2.hi
2011      __ mul(IP, in1_lo, in2_hi);
2012      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2013      __ mla(out_hi, in1_hi, in2_lo, IP);
2014      // out.lo <- (in1.lo * in2.lo)[31:0];
2015      __ umull(out_lo, IP, in1_lo, in2_lo);
2016      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2017      __ add(out_hi, out_hi, ShifterOperand(IP));
2018      break;
2019    }
2020
2021    case Primitive::kPrimFloat: {
2022      __ vmuls(out.AsFpuRegister<SRegister>(),
2023               first.AsFpuRegister<SRegister>(),
2024               second.AsFpuRegister<SRegister>());
2025      break;
2026    }
2027
2028    case Primitive::kPrimDouble: {
2029      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2030               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2031               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2032      break;
2033    }
2034
2035    default:
2036      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2037  }
2038}
2039
2040void LocationsBuilderARM::VisitDiv(HDiv* div) {
2041  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2042      ? LocationSummary::kCall
2043      : LocationSummary::kNoCall;
2044  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2045
2046  switch (div->GetResultType()) {
2047    case Primitive::kPrimInt: {
2048      locations->SetInAt(0, Location::RequiresRegister());
2049      locations->SetInAt(1, Location::RequiresRegister());
2050      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2051      break;
2052    }
2053    case Primitive::kPrimLong: {
2054      InvokeRuntimeCallingConvention calling_convention;
2055      locations->SetInAt(0, Location::RegisterPairLocation(
2056          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2057      locations->SetInAt(1, Location::RegisterPairLocation(
2058          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2059      // The runtime helper puts the output in R0,R2.
2060      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2061      break;
2062    }
2063    case Primitive::kPrimFloat:
2064    case Primitive::kPrimDouble: {
2065      locations->SetInAt(0, Location::RequiresFpuRegister());
2066      locations->SetInAt(1, Location::RequiresFpuRegister());
2067      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2068      break;
2069    }
2070
2071    default:
2072      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2073  }
2074}
2075
2076void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2077  LocationSummary* locations = div->GetLocations();
2078  Location out = locations->Out();
2079  Location first = locations->InAt(0);
2080  Location second = locations->InAt(1);
2081
2082  switch (div->GetResultType()) {
2083    case Primitive::kPrimInt: {
2084      __ sdiv(out.AsRegister<Register>(),
2085              first.AsRegister<Register>(),
2086              second.AsRegister<Register>());
2087      break;
2088    }
2089
2090    case Primitive::kPrimLong: {
2091      InvokeRuntimeCallingConvention calling_convention;
2092      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2093      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2094      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2095      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2096      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2097      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2098
2099      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2100      break;
2101    }
2102
2103    case Primitive::kPrimFloat: {
2104      __ vdivs(out.AsFpuRegister<SRegister>(),
2105               first.AsFpuRegister<SRegister>(),
2106               second.AsFpuRegister<SRegister>());
2107      break;
2108    }
2109
2110    case Primitive::kPrimDouble: {
2111      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2112               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2113               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2114      break;
2115    }
2116
2117    default:
2118      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2119  }
2120}
2121
2122void LocationsBuilderARM::VisitRem(HRem* rem) {
2123  Primitive::Type type = rem->GetResultType();
2124  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2125      ? LocationSummary::kNoCall
2126      : LocationSummary::kCall;
2127  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2128
2129  switch (type) {
2130    case Primitive::kPrimInt: {
2131      locations->SetInAt(0, Location::RequiresRegister());
2132      locations->SetInAt(1, Location::RequiresRegister());
2133      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2134      locations->AddTemp(Location::RequiresRegister());
2135      break;
2136    }
2137    case Primitive::kPrimLong: {
2138      InvokeRuntimeCallingConvention calling_convention;
2139      locations->SetInAt(0, Location::RegisterPairLocation(
2140          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2141      locations->SetInAt(1, Location::RegisterPairLocation(
2142          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2143      // The runtime helper puts the output in R2,R3.
2144      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2145      break;
2146    }
2147    case Primitive::kPrimFloat: {
2148      InvokeRuntimeCallingConvention calling_convention;
2149      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2150      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2151      locations->SetOut(Location::FpuRegisterLocation(S0));
2152      break;
2153    }
2154
2155    case Primitive::kPrimDouble: {
2156      InvokeRuntimeCallingConvention calling_convention;
2157      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2158          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2159      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2160          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2161      locations->SetOut(Location::FpuRegisterPairLocation(S0, S1));
2162      break;
2163    }
2164
2165    default:
2166      LOG(FATAL) << "Unexpected rem type " << type;
2167  }
2168}
2169
2170void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2171  LocationSummary* locations = rem->GetLocations();
2172  Location out = locations->Out();
2173  Location first = locations->InAt(0);
2174  Location second = locations->InAt(1);
2175
2176  Primitive::Type type = rem->GetResultType();
2177  switch (type) {
2178    case Primitive::kPrimInt: {
2179      Register reg1 = first.AsRegister<Register>();
2180      Register reg2 = second.AsRegister<Register>();
2181      Register temp = locations->GetTemp(0).AsRegister<Register>();
2182
2183      // temp = reg1 / reg2  (integer division)
2184      // temp = temp * reg2
2185      // dest = reg1 - temp
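      // ARM has no integer remainder instruction, so the remainder is
      // reconstructed from the quotient as reg1 - (reg1 / reg2) * reg2.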
2186      __ sdiv(temp, reg1, reg2);
2187      __ mul(temp, temp, reg2);
2188      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2189      break;
2190    }
2191
2192    case Primitive::kPrimLong: {
2193      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2194      break;
2195    }
2196
2197    case Primitive::kPrimFloat: {
2198      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2199      break;
2200    }
2201
2202    case Primitive::kPrimDouble: {
2203      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2204      break;
2205    }
2206
2207    default:
2208      LOG(FATAL) << "Unexpected rem type " << type;
2209  }
2210}
2211
2212void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2213  LocationSummary* locations =
2214      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2215  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2216  if (instruction->HasUses()) {
2217    locations->SetOut(Location::SameAsFirstInput());
2218  }
2219}
2220
2221void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2222  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2223  codegen_->AddSlowPath(slow_path);
2224
2225  LocationSummary* locations = instruction->GetLocations();
2226  Location value = locations->InAt(0);
2227
2228  switch (instruction->GetType()) {
2229    case Primitive::kPrimInt: {
2230      if (value.IsRegister()) {
2231        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2232        __ b(slow_path->GetEntryLabel(), EQ);
2233      } else {
2234        DCHECK(value.IsConstant()) << value;
2235        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2236          __ b(slow_path->GetEntryLabel());
2237        }
2238      }
2239      break;
2240    }
2241    case Primitive::kPrimLong: {
2242      if (value.IsRegisterPair()) {
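        // ORRS of the two halves sets the Z flag only when the full 64-bit
        // value is zero.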
2243        __ orrs(IP,
2244                value.AsRegisterPairLow<Register>(),
2245                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2246        __ b(slow_path->GetEntryLabel(), EQ);
2247      } else {
2248        DCHECK(value.IsConstant()) << value;
2249        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2250          __ b(slow_path->GetEntryLabel());
2251        }
2252      }
2253      break;
2254    }
2255    default:
2256      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2257  }
2258}
2259
2260void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2261  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2262
2263  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2264      ? LocationSummary::kCall
2265      : LocationSummary::kNoCall;
2266  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2267
2268  switch (op->GetResultType()) {
2269    case Primitive::kPrimInt: {
2270      locations->SetInAt(0, Location::RequiresRegister());
2271      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2272      locations->SetOut(Location::RequiresRegister());
2273      break;
2274    }
2275    case Primitive::kPrimLong: {
2276      InvokeRuntimeCallingConvention calling_convention;
2277      locations->SetInAt(0, Location::RegisterPairLocation(
2278          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2279      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2280      // The runtime helper puts the output in R0,R2.
2281      locations->SetOut(Location::RegisterPairLocation(R0, R2));
2282      break;
2283    }
2284    default:
2285      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2286  }
2287}
2288
2289void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2290  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2291
2292  LocationSummary* locations = op->GetLocations();
2293  Location out = locations->Out();
2294  Location first = locations->InAt(0);
2295  Location second = locations->InAt(1);
2296
2297  Primitive::Type type = op->GetResultType();
2298  switch (type) {
2299    case Primitive::kPrimInt: {
2300      Register out_reg = out.AsRegister<Register>();
2301      Register first_reg = first.AsRegister<Register>();
2302      // Arm doesn't mask the shift count, so we need to do it ourselves.
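      // Java only uses the low five bits of an `int' shift count (count & 0x1f),
      // which is what masking with kMaxIntShiftValue implements.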
2303      if (second.IsRegister()) {
2304        Register second_reg = second.AsRegister<Register>();
2305        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2306        if (op->IsShl()) {
2307          __ Lsl(out_reg, first_reg, second_reg);
2308        } else if (op->IsShr()) {
2309          __ Asr(out_reg, first_reg, second_reg);
2310        } else {
2311          __ Lsr(out_reg, first_reg, second_reg);
2312        }
2313      } else {
2314        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2315        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2316        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
2317          __ Mov(out_reg, first_reg);
2318        } else if (op->IsShl()) {
2319          __ Lsl(out_reg, first_reg, shift_value);
2320        } else if (op->IsShr()) {
2321          __ Asr(out_reg, first_reg, shift_value);
2322        } else {
2323          __ Lsr(out_reg, first_reg, shift_value);
2324        }
2325      }
2326      break;
2327    }
2328    case Primitive::kPrimLong: {
2329      // TODO: Inline the assembly instead of calling the runtime.
2330      InvokeRuntimeCallingConvention calling_convention;
2331      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2332      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2333      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
2334      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2335      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());
2336
2337      int32_t entry_point_offset;
2338      if (op->IsShl()) {
2339        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
2340      } else if (op->IsShr()) {
2341        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
2342      } else {
2343        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
2344      }
2345      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2346      __ blx(LR);
2347      break;
2348    }
2349    default:
2350      LOG(FATAL) << "Unexpected operation type " << type;
2351  }
2352}
2353
2354void LocationsBuilderARM::VisitShl(HShl* shl) {
2355  HandleShift(shl);
2356}
2357
2358void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
2359  HandleShift(shl);
2360}
2361
2362void LocationsBuilderARM::VisitShr(HShr* shr) {
2363  HandleShift(shr);
2364}
2365
2366void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
2367  HandleShift(shr);
2368}
2369
2370void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
2371  HandleShift(ushr);
2372}
2373
2374void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
2375  HandleShift(ushr);
2376}
2377
2378void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2379  LocationSummary* locations =
2380      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2381  InvokeRuntimeCallingConvention calling_convention;
2382  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2383  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2384  locations->SetOut(Location::RegisterLocation(R0));
2385}
2386
2387void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
2388  InvokeRuntimeCallingConvention calling_convention;
2389  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2390  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2391  codegen_->InvokeRuntime(
2392      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
2393}
2394
2395void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2396  LocationSummary* locations =
2397      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2398  InvokeRuntimeCallingConvention calling_convention;
2399  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2400  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2401  locations->SetOut(Location::RegisterLocation(R0));
2402  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2403}
2404
2405void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
2406  InvokeRuntimeCallingConvention calling_convention;
2407  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
2408  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2409  codegen_->InvokeRuntime(
2410      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
2411}
2412
2413void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2414  LocationSummary* locations =
2415      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2416  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2417  if (location.IsStackSlot()) {
2418    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2419  } else if (location.IsDoubleStackSlot()) {
2420    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2421  }
2422  locations->SetOut(location);
2423}
2424
2425void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
2426  // Nothing to do, the parameter is already at its location.
2427  UNUSED(instruction);
2428}
2429
2430void LocationsBuilderARM::VisitNot(HNot* not_) {
2431  LocationSummary* locations =
2432      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2433  locations->SetInAt(0, Location::RequiresRegister());
2434  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2435}
2436
2437void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2438  LocationSummary* locations = not_->GetLocations();
2439  Location out = locations->Out();
2440  Location in = locations->InAt(0);
2441  switch (not_->InputAt(0)->GetType()) {
2442    case Primitive::kPrimBoolean:
2443      __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
2444      break;
2445
2446    case Primitive::kPrimInt:
2447      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2448      break;
2449
2450    case Primitive::kPrimLong:
2451      __ mvn(out.AsRegisterPairLow<Register>(),
2452             ShifterOperand(in.AsRegisterPairLow<Register>()));
2453      __ mvn(out.AsRegisterPairHigh<Register>(),
2454             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2455      break;
2456
2457    default:
2458      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2459  }
2460}
2461
2462void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2463  LocationSummary* locations =
2464      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2465  switch (compare->InputAt(0)->GetType()) {
2466    case Primitive::kPrimLong: {
2467      locations->SetInAt(0, Location::RequiresRegister());
2468      locations->SetInAt(1, Location::RequiresRegister());
2469      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2470      break;
2471    }
2472    case Primitive::kPrimFloat:
2473    case Primitive::kPrimDouble: {
2474      locations->SetInAt(0, Location::RequiresFpuRegister());
2475      locations->SetInAt(1, Location::RequiresFpuRegister());
2476      locations->SetOut(Location::RequiresRegister());
2477      break;
2478    }
2479    default:
2480      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2481  }
2482}
2483
2484void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
2485  LocationSummary* locations = compare->GetLocations();
2486  Register out = locations->Out().AsRegister<Register>();
2487  Location left = locations->InAt(0);
2488  Location right = locations->InAt(1);
2489
2490  Label less, greater, done;
2491  Primitive::Type type = compare->InputAt(0)->GetType();
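  // The result in `out' is -1, 0 or 1, standing for less, equal and greater
  // respectively; the cases below only set the condition flags.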
2492  switch (type) {
2493    case Primitive::kPrimLong: {
2494      __ cmp(left.AsRegisterPairHigh<Register>(),
2495             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
2496      __ b(&less, LT);
2497      __ b(&greater, GT);
2498      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
2499      __ LoadImmediate(out, 0);
2500      __ cmp(left.AsRegisterPairLow<Register>(),
2501             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
2502      break;
2503    }
2504    case Primitive::kPrimFloat:
2505    case Primitive::kPrimDouble: {
2506      __ LoadImmediate(out, 0);
2507      if (type == Primitive::kPrimFloat) {
2508        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
2509      } else {
2510        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
2511                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
2512      }
2513      __ vmstat();  // transfer FP status register to ARM APSR.
2514      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
2515      break;
2516    }
2517    default:
2518      LOG(FATAL) << "Unexpected compare type " << type;
2519  }
2520  __ b(&done, EQ);
2521  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.
2522
2523  __ Bind(&greater);
2524  __ LoadImmediate(out, 1);
2525  __ b(&done);
2526
2527  __ Bind(&less);
2528  __ LoadImmediate(out, -1);
2529
2530  __ Bind(&done);
2531}
2532
2533void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2534  LocationSummary* locations =
2535      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2536  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2537    locations->SetInAt(i, Location::Any());
2538  }
2539  locations->SetOut(Location::Any());
2540}
2541
2542void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
2543  UNUSED(instruction);
2544  LOG(FATAL) << "Unreachable";
2545}
2546
2547void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2548  // TODO (ported from quick): revisit Arm barrier kinds
2549  DmbOptions flavour = DmbOptions::ISH;  // Quiet C++ warnings.
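  // DMB ISH orders all memory accesses within the inner shareable domain;
  // ISHST is a lighter variant that only orders stores against stores.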
2550  switch (kind) {
2551    case MemBarrierKind::kAnyStore:
2552    case MemBarrierKind::kLoadAny:
2553    case MemBarrierKind::kAnyAny: {
2554      flavour = DmbOptions::ISH;
2555      break;
2556    }
2557    case MemBarrierKind::kStoreStore: {
2558      flavour = DmbOptions::ISHST;
2559      break;
2560    }
2561    default:
2562      LOG(FATAL) << "Unexpected memory barrier " << kind;
2563  }
2564  __ dmb(flavour);
2565}
2566
2567void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
2568                                                         uint32_t offset,
2569                                                         Register out_lo,
2570                                                         Register out_hi) {
2571  if (offset != 0) {
2572    __ LoadImmediate(out_lo, offset);
2573    __ add(IP, addr, ShifterOperand(out_lo));
2574    addr = IP;
2575  }
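  // A single LDREXD is used because, unlike a plain LDRD, it is guaranteed to
  // perform the 64-bit load atomically; no matching STREXD is needed here.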
2576  __ ldrexd(out_lo, out_hi, addr);
2577}
2578
2579void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
2580                                                          uint32_t offset,
2581                                                          Register value_lo,
2582                                                          Register value_hi,
2583                                                          Register temp1,
2584                                                          Register temp2,
2585                                                          HInstruction* instruction) {
2586  Label fail;
2587  if (offset != 0) {
2588    __ LoadImmediate(temp1, offset);
2589    __ add(IP, addr, ShifterOperand(temp1));
2590    addr = IP;
2591  }
2592  __ Bind(&fail);
2593  // We need a load followed by store. (The address used in a STREX instruction must
2594  // be the same as the address in the most recently executed LDREX instruction.)
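  // STREXD writes 0 to temp1 on success and 1 on failure, so the code below
  // keeps retrying from `fail' until the exclusive store goes through.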
2595  __ ldrexd(temp1, temp2, addr);
2596  codegen_->MaybeRecordImplicitNullCheck(instruction);
2597  __ strexd(temp1, value_lo, value_hi, addr);
2598  __ cmp(temp1, ShifterOperand(0));
2599  __ b(&fail, NE);
2600}
2601
2602void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2603  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2604
2605  LocationSummary* locations =
2606      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2607  locations->SetInAt(0, Location::RequiresRegister());
2608  locations->SetInAt(1, Location::RequiresRegister());
2609
2610
2611  Primitive::Type field_type = field_info.GetFieldType();
2612  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2613  bool generate_volatile = field_info.IsVolatile()
2614      && is_wide
2615      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2616  // Temporary registers for the write barrier.
2617  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2618  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2619    locations->AddTemp(Location::RequiresRegister());
2620    locations->AddTemp(Location::RequiresRegister());
2621  } else if (generate_volatile) {
2622    // Arm encoding has some additional constraints for ldrexd/strexd:
2623    // - registers need to be consecutive
2624    // - the first register should be even but not R14.
2625    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2626    // enable Arm encoding.
2627    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2628
2629    locations->AddTemp(Location::RequiresRegister());
2630    locations->AddTemp(Location::RequiresRegister());
2631    if (field_type == Primitive::kPrimDouble) {
2632      // For doubles we need two more registers to copy the value.
2633      locations->AddTemp(Location::RegisterLocation(R2));
2634      locations->AddTemp(Location::RegisterLocation(R3));
2635    }
2636  }
2637}
2638
2639void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
2640                                                 const FieldInfo& field_info) {
2641  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2642
2643  LocationSummary* locations = instruction->GetLocations();
2644  Register base = locations->InAt(0).AsRegister<Register>();
2645  Location value = locations->InAt(1);
2646
2647  bool is_volatile = field_info.IsVolatile();
2648  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2649  Primitive::Type field_type = field_info.GetFieldType();
2650  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2651
2652  if (is_volatile) {
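  // Volatile stores are bracketed by barriers: an any-store barrier before the
  // store and an any-any barrier after it (emitted at the end of this function).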
2653    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
2654  }
2655
2656  switch (field_type) {
2657    case Primitive::kPrimBoolean:
2658    case Primitive::kPrimByte: {
2659      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
2660      break;
2661    }
2662
2663    case Primitive::kPrimShort:
2664    case Primitive::kPrimChar: {
2665      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
2666      break;
2667    }
2668
2669    case Primitive::kPrimInt:
2670    case Primitive::kPrimNot: {
2671      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
2672      break;
2673    }
2674
2675    case Primitive::kPrimLong: {
2676      if (is_volatile && !atomic_ldrd_strd) {
2677        GenerateWideAtomicStore(base, offset,
2678                                value.AsRegisterPairLow<Register>(),
2679                                value.AsRegisterPairHigh<Register>(),
2680                                locations->GetTemp(0).AsRegister<Register>(),
2681                                locations->GetTemp(1).AsRegister<Register>(),
2682                                instruction);
2683      } else {
2684        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
2685        codegen_->MaybeRecordImplicitNullCheck(instruction);
2686      }
2687      break;
2688    }
2689
2690    case Primitive::kPrimFloat: {
2691      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
2692      break;
2693    }
2694
2695    case Primitive::kPrimDouble: {
2696      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
2697      if (is_volatile && !atomic_ldrd_strd) {
2698        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
2699        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();
2700
2701        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);
2702
2703        GenerateWideAtomicStore(base, offset,
2704                                value_reg_lo,
2705                                value_reg_hi,
2706                                locations->GetTemp(2).AsRegister<Register>(),
2707                                locations->GetTemp(3).AsRegister<Register>(),
2708                                instruction);
2709      } else {
2710        __ StoreDToOffset(value_reg, base, offset);
2711        codegen_->MaybeRecordImplicitNullCheck(instruction);
2712      }
2713      break;
2714    }
2715
2716    case Primitive::kPrimVoid:
2717      LOG(FATAL) << "Unreachable type " << field_type;
2718      UNREACHABLE();
2719  }
2720
2721  // Longs and doubles are handled in the switch.
2722  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
2723    codegen_->MaybeRecordImplicitNullCheck(instruction);
2724  }
2725
2726  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2727    Register temp = locations->GetTemp(0).AsRegister<Register>();
2728    Register card = locations->GetTemp(1).AsRegister<Register>();
2729    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
2730  }
2731
2732  if (is_volatile) {
2733    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
2734  }
2735}
2736
2737void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2738  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2739  LocationSummary* locations =
2740      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2741  locations->SetInAt(0, Location::RequiresRegister());
2742  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2743
2744  bool generate_volatile = field_info.IsVolatile()
2745      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2746      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2747  if (generate_volatile) {
2748    // Arm encoding has some additional constraints for ldrexd/strexd:
2749    // - registers need to be consecutive
2750    // - the first register should be even but not R14.
2751    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2752    // enable Arm encoding.
2753    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2754    locations->AddTemp(Location::RequiresRegister());
2755    locations->AddTemp(Location::RequiresRegister());
2756  }
2757}
2758
2759void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
2760                                                 const FieldInfo& field_info) {
2761  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2762
2763  LocationSummary* locations = instruction->GetLocations();
2764  Register base = locations->InAt(0).AsRegister<Register>();
2765  Location out = locations->Out();
2766  bool is_volatile = field_info.IsVolatile();
2767  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2768  Primitive::Type field_type = field_info.GetFieldType();
2769  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2770
2771  switch (field_type) {
2772    case Primitive::kPrimBoolean: {
2773      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
2774      break;
2775    }
2776
2777    case Primitive::kPrimByte: {
2778      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
2779      break;
2780    }
2781
2782    case Primitive::kPrimShort: {
2783      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
2784      break;
2785    }
2786
2787    case Primitive::kPrimChar: {
2788      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
2789      break;
2790    }
2791
2792    case Primitive::kPrimInt:
2793    case Primitive::kPrimNot: {
2794      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
2795      break;
2796    }
2797
2798    case Primitive::kPrimLong: {
2799      if (is_volatile && !atomic_ldrd_strd) {
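        // ldrd is not single-copy atomic on this CPU, so use ldrexd (via
        // GenerateWideAtomicLoad) for an atomic 64-bit load.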
2800        GenerateWideAtomicLoad(base, offset,
2801                               out.AsRegisterPairLow<Register>(),
2802                               out.AsRegisterPairHigh<Register>());
2803      } else {
2804        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
2805      }
2806      break;
2807    }
2808
2809    case Primitive::kPrimFloat: {
2810      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
2811      break;
2812    }
2813
2814    case Primitive::kPrimDouble: {
2815      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
2816      if (is_volatile && !atomic_ldrd_strd) {
2817        Register lo = locations->GetTemp(0).AsRegister<Register>();
2818        Register hi = locations->GetTemp(1).AsRegister<Register>();
2819        GenerateWideAtomicLoad(base, offset, lo, hi);
2820        codegen_->MaybeRecordImplicitNullCheck(instruction);
2821        __ vmovdrr(out_reg, lo, hi);
2822      } else {
2823        __ LoadDFromOffset(out_reg, base, offset);
2824        codegen_->MaybeRecordImplicitNullCheck(instruction);
2825      }
2826      break;
2827    }
2828
2829    case Primitive::kPrimVoid:
2830      LOG(FATAL) << "Unreachable type " << field_type;
2831      UNREACHABLE();
2832  }
2833
2834  // Doubles are handled in the switch.
2835  if (field_type != Primitive::kPrimDouble) {
2836    codegen_->MaybeRecordImplicitNullCheck(instruction);
2837  }
2838
2839  if (is_volatile) {
2840    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
2841  }
2842}
2843
2844void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2845  HandleFieldSet(instruction, instruction->GetFieldInfo());
2846}
2847
2848void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2849  HandleFieldSet(instruction, instruction->GetFieldInfo());
2850}
2851
2852void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2853  HandleFieldGet(instruction, instruction->GetFieldInfo());
2854}
2855
2856void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2857  HandleFieldGet(instruction, instruction->GetFieldInfo());
2858}
2859
2860void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2861  HandleFieldGet(instruction, instruction->GetFieldInfo());
2862}
2863
2864void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2865  HandleFieldGet(instruction, instruction->GetFieldInfo());
2866}
2867
2868void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2869  HandleFieldSet(instruction, instruction->GetFieldInfo());
2870}
2871
2872void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2873  HandleFieldSet(instruction, instruction->GetFieldInfo());
2874}
2875
2876void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2877  LocationSummary* locations =
2878      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2879  locations->SetInAt(0, Location::RequiresRegister());
2880  if (instruction->HasUses()) {
2881    locations->SetOut(Location::SameAsFirstInput());
2882  }
2883}
2884
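// An implicit null check simply loads from the object: a null receiver faults, and the
// fault handler turns the signal into a NullPointerException at the recorded dex pc.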
2885void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
2886  if (codegen_->CanMoveNullCheckToUser(instruction)) {
2887    return;
2888  }
2889  Location obj = instruction->GetLocations()->InAt(0);
2890
2891  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
2892  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2893}
2894
2895void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
2896  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2897  codegen_->AddSlowPath(slow_path);
2898
2899  LocationSummary* locations = instruction->GetLocations();
2900  Location obj = locations->InAt(0);
2901
2902  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2903  __ b(slow_path->GetEntryLabel(), EQ);
2904}
2905
2906void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2907  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2908    GenerateImplicitNullCheck(instruction);
2909  } else {
2910    GenerateExplicitNullCheck(instruction);
2911  }
2912}
2913
2914void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2915  LocationSummary* locations =
2916      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2917  locations->SetInAt(0, Location::RequiresRegister());
2918  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2919  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2920}
2921
2922void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2923  LocationSummary* locations = instruction->GetLocations();
2924  Register obj = locations->InAt(0).AsRegister<Register>();
2925  Location index = locations->InAt(1);
2926
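  // For a constant index the element offset is folded into the load; otherwise the scaled
  // index is first added to the array base in IP.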
2927  switch (instruction->GetType()) {
2928    case Primitive::kPrimBoolean: {
2929      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2930      Register out = locations->Out().AsRegister<Register>();
2931      if (index.IsConstant()) {
2932        size_t offset =
2933            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2934        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2935      } else {
2936        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2937        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2938      }
2939      break;
2940    }
2941
2942    case Primitive::kPrimByte: {
2943      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2944      Register out = locations->Out().AsRegister<Register>();
2945      if (index.IsConstant()) {
2946        size_t offset =
2947            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2948        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2949      } else {
2950        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2951        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2952      }
2953      break;
2954    }
2955
2956    case Primitive::kPrimShort: {
2957      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2958      Register out = locations->Out().AsRegister<Register>();
2959      if (index.IsConstant()) {
2960        size_t offset =
2961            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2962        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2963      } else {
2964        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2965        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2966      }
2967      break;
2968    }
2969
2970    case Primitive::kPrimChar: {
2971      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2972      Register out = locations->Out().AsRegister<Register>();
2973      if (index.IsConstant()) {
2974        size_t offset =
2975            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2976        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2977      } else {
2978        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2979        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2980      }
2981      break;
2982    }
2983
2984    case Primitive::kPrimInt:
2985    case Primitive::kPrimNot: {
2986      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2987      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2988      Register out = locations->Out().AsRegister<Register>();
2989      if (index.IsConstant()) {
2990        size_t offset =
2991            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2992        __ LoadFromOffset(kLoadWord, out, obj, offset);
2993      } else {
2994        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
2995        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2996      }
2997      break;
2998    }
2999
3000    case Primitive::kPrimLong: {
3001      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3002      Location out = locations->Out();
3003      if (index.IsConstant()) {
3004        size_t offset =
3005            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3006        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
3007      } else {
3008        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3009        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
3010      }
3011      break;
3012    }
3013
3014    case Primitive::kPrimFloat: {
3015      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3016      Location out = locations->Out();
3017      DCHECK(out.IsFpuRegister());
3018      if (index.IsConstant()) {
3019        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3020        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
3021      } else {
3022        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3023        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
3024      }
3025      break;
3026    }
3027
3028    case Primitive::kPrimDouble: {
3029      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3030      Location out = locations->Out();
3031      DCHECK(out.IsFpuRegisterPair());
3032      if (index.IsConstant()) {
3033        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3034        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3035      } else {
3036        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3037        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3038      }
3039      break;
3040    }
3041
3042    case Primitive::kPrimVoid:
3043      LOG(FATAL) << "Unreachable type " << instruction->GetType();
3044      UNREACHABLE();
3045  }
3046  codegen_->MaybeRecordImplicitNullCheck(instruction);
3047}
3048
3049void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3050  Primitive::Type value_type = instruction->GetComponentType();
3051
3052  bool needs_write_barrier =
3053      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3054  bool needs_runtime_call = instruction->NeedsTypeCheck();
3055
3056  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3057      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3058  if (needs_runtime_call) {
3059    InvokeRuntimeCallingConvention calling_convention;
3060    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3061    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3062    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3063  } else {
3064    locations->SetInAt(0, Location::RequiresRegister());
3065    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3066    locations->SetInAt(2, Location::RequiresRegister());
3067
3068    if (needs_write_barrier) {
3069      // Temporary registers for the write barrier.
3070      locations->AddTemp(Location::RequiresRegister());
3071      locations->AddTemp(Location::RequiresRegister());
3072    }
3073  }
3074}
3075
3076void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
3077  LocationSummary* locations = instruction->GetLocations();
3078  Register obj = locations->InAt(0).AsRegister<Register>();
3079  Location index = locations->InAt(1);
3080  Primitive::Type value_type = instruction->GetComponentType();
3081  bool needs_runtime_call = locations->WillCall();
3082  bool needs_write_barrier =
3083      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3084
3085  switch (value_type) {
3086    case Primitive::kPrimBoolean:
3087    case Primitive::kPrimByte: {
3088      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
3089      Register value = locations->InAt(2).AsRegister<Register>();
3090      if (index.IsConstant()) {
3091        size_t offset =
3092            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
3093        __ StoreToOffset(kStoreByte, value, obj, offset);
3094      } else {
3095        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
3096        __ StoreToOffset(kStoreByte, value, IP, data_offset);
3097      }
3098      break;
3099    }
3100
3101    case Primitive::kPrimShort:
3102    case Primitive::kPrimChar: {
3103      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
3104      Register value = locations->InAt(2).AsRegister<Register>();
3105      if (index.IsConstant()) {
3106        size_t offset =
3107            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
3108        __ StoreToOffset(kStoreHalfword, value, obj, offset);
3109      } else {
3110        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
3111        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
3112      }
3113      break;
3114    }
3115
3116    case Primitive::kPrimInt:
3117    case Primitive::kPrimNot: {
3118      if (!needs_runtime_call) {
3119        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
3120        Register value = locations->InAt(2).AsRegister<Register>();
3121        if (index.IsConstant()) {
3122          size_t offset =
3123              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3124          __ StoreToOffset(kStoreWord, value, obj, offset);
3125        } else {
3126          DCHECK(index.IsRegister()) << index;
3127          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3128          __ StoreToOffset(kStoreWord, value, IP, data_offset);
3129        }
3130        codegen_->MaybeRecordImplicitNullCheck(instruction);
3131        if (needs_write_barrier) {
3132          DCHECK_EQ(value_type, Primitive::kPrimNot);
3133          Register temp = locations->GetTemp(0).AsRegister<Register>();
3134          Register card = locations->GetTemp(1).AsRegister<Register>();
3135          codegen_->MarkGCCard(temp, card, obj, value);
3136        }
3137      } else {
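        // The runtime entry point performs the type check, the store and the write barrier.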
3138        DCHECK_EQ(value_type, Primitive::kPrimNot);
3139        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
3140                                instruction,
3141                                instruction->GetDexPc());
3142      }
3143      break;
3144    }
3145
3146    case Primitive::kPrimLong: {
3147      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3148      Location value = locations->InAt(2);
3149      if (index.IsConstant()) {
3150        size_t offset =
3151            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3152        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
3153      } else {
3154        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3155        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
3156      }
3157      break;
3158    }
3159
3160    case Primitive::kPrimFloat: {
3161      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3162      Location value = locations->InAt(2);
3163      DCHECK(value.IsFpuRegister());
3164      if (index.IsConstant()) {
3165        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3166        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
3167      } else {
3168        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3169        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
3170      }
3171      break;
3172    }
3173
3174    case Primitive::kPrimDouble: {
3175      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3176      Location value = locations->InAt(2);
3177      DCHECK(value.IsFpuRegisterPair());
3178      if (index.IsConstant()) {
3179        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3180        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3181      } else {
3182        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3183        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3184      }
3185
3186      break;
3187    }
3188
3189    case Primitive::kPrimVoid:
3190      LOG(FATAL) << "Unreachable type " << value_type;
3191      UNREACHABLE();
3192  }
3193
3194  // Ints and objects are handled in the switch.
3195  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
3196    codegen_->MaybeRecordImplicitNullCheck(instruction);
3197  }
3198}
3199
3200void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3201  LocationSummary* locations =
3202      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3203  locations->SetInAt(0, Location::RequiresRegister());
3204  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3205}
3206
3207void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3208  LocationSummary* locations = instruction->GetLocations();
3209  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3210  Register obj = locations->InAt(0).AsRegister<Register>();
3211  Register out = locations->Out().AsRegister<Register>();
3212  __ LoadFromOffset(kLoadWord, out, obj, offset);
3213  codegen_->MaybeRecordImplicitNullCheck(instruction);
3214}
3215
3216void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3217  LocationSummary* locations =
3218      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3219  locations->SetInAt(0, Location::RequiresRegister());
3220  locations->SetInAt(1, Location::RequiresRegister());
3221  if (instruction->HasUses()) {
3222    locations->SetOut(Location::SameAsFirstInput());
3223  }
3224}
3225
3226void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3227  LocationSummary* locations = instruction->GetLocations();
3228  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3229      instruction, locations->InAt(0), locations->InAt(1));
3230  codegen_->AddSlowPath(slow_path);
3231
3232  Register index = locations->InAt(0).AsRegister<Register>();
3233  Register length = locations->InAt(1).AsRegister<Register>();
3234
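  // An unsigned comparison also catches a negative index: it wraps to a large unsigned
  // value, so CS (unsigned >=) branches to the slow path as well.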
3235  __ cmp(index, ShifterOperand(length));
3236  __ b(slow_path->GetEntryLabel(), CS);
3237}
3238
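// Marks the card covering `object` when a non-null reference is stored into it:
// card_table[object >> kCardShift] is set to the dirty value, which is the low byte of the
// (biased) card table base held in `card`.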
3239void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
3240  Label is_null;
3241  __ CompareAndBranchIfZero(value, &is_null);
3242  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
3243  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
3244  __ strb(card, Address(card, temp));
3245  __ Bind(&is_null);
3246}
3247
3248void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3249  temp->SetLocations(nullptr);
3250}
3251
3252void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3253  // Nothing to do, this is driven by the code generator.
3254  UNUSED(temp);
3255}
3256
3257void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3258  UNUSED(instruction);
3259  LOG(FATAL) << "Unreachable";
3260}
3261
3262void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
3263  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3264}
3265
3266void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3267  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3268}
3269
3270void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3271  HBasicBlock* block = instruction->GetBlock();
3272  if (block->GetLoopInformation() != nullptr) {
3273    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3274    // The back edge will generate the suspend check.
3275    return;
3276  }
3277  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3278    // The goto will generate the suspend check.
3279    return;
3280  }
3281  GenerateSuspendCheck(instruction, nullptr);
3282}
3283
3284void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
3285                                                       HBasicBlock* successor) {
3286  SuspendCheckSlowPathARM* slow_path =
3287      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
3288  codegen_->AddSlowPath(slow_path);
3289
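  // The thread flags are non-zero when a suspend or checkpoint request is pending.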
3290  __ LoadFromOffset(
3291      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
3292  __ cmp(IP, ShifterOperand(0));
3293  // TODO: Figure out the branch offsets and use cbz/cbnz.
3294  if (successor == nullptr) {
3295    __ b(slow_path->GetEntryLabel(), NE);
3296    __ Bind(slow_path->GetReturnLabel());
3297  } else {
3298    __ b(codegen_->GetLabelOf(successor), EQ);
3299    __ b(slow_path->GetEntryLabel());
3300  }
3301}
3302
3303ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3304  return codegen_->GetAssembler();
3305}
3306
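// Emits one move of the resolved parallel move. IP is the scratch register for
// stack-to-stack and constant-to-stack moves.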
3307void ParallelMoveResolverARM::EmitMove(size_t index) {
3308  MoveOperands* move = moves_.Get(index);
3309  Location source = move->GetSource();
3310  Location destination = move->GetDestination();
3311
3312  if (source.IsRegister()) {
3313    if (destination.IsRegister()) {
3314      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
3315    } else {
3316      DCHECK(destination.IsStackSlot());
3317      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
3318                       SP, destination.GetStackIndex());
3319    }
3320  } else if (source.IsStackSlot()) {
3321    if (destination.IsRegister()) {
3322      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
3323                        SP, source.GetStackIndex());
3324    } else if (destination.IsFpuRegister()) {
3325      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
3326    } else {
3327      DCHECK(destination.IsStackSlot());
3328      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3329      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3330    }
3331  } else if (source.IsFpuRegister()) {
3332    if (destination.IsFpuRegister()) {
3333      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
3334    } else {
3335      DCHECK(destination.IsStackSlot());
3336      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
3337    }
3338  } else if (source.IsDoubleStackSlot()) {
3339    if (destination.IsFpuRegisterPair()) {
3340      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
3341                         SP, source.GetStackIndex());
3342    } else {
3343      DCHECK(destination.IsDoubleStackSlot()) << destination;
3344      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3345      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3346      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
3347      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3348    }
3349  } else {
3350    DCHECK(source.IsConstant()) << source;
3351    HInstruction* constant = source.GetConstant();
3352    if (constant->IsIntConstant()) {
3353      int32_t value = constant->AsIntConstant()->GetValue();
3354      if (destination.IsRegister()) {
3355        __ LoadImmediate(destination.AsRegister<Register>(), value);
3356      } else {
3357        DCHECK(destination.IsStackSlot());
3358        __ LoadImmediate(IP, value);
3359        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3360      }
3361    } else {
3362      DCHECK(constant->IsFloatConstant());
3363      float value = constant->AsFloatConstant()->GetValue();
3364      if (destination.IsFpuRegister()) {
3365        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
3366      } else {
3367        DCHECK(destination.IsStackSlot());
3368        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
3369        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3370      }
3371    }
3372  }
3373}
3374
3375void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
3376  __ Mov(IP, reg);
3377  __ LoadFromOffset(kLoadWord, reg, SP, mem);
3378  __ StoreToOffset(kStoreWord, IP, SP, mem);
3379}
3380
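// Swaps two stack slots. A second core scratch register is claimed (and spilled around the
// swap if none is free) in addition to IP; a spill shifts the SP-relative offsets by one word.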
3381void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
3382  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
3383  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
3384  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
3385                    SP, mem1 + stack_offset);
3386  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
3387  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
3388                   SP, mem2 + stack_offset);
3389  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
3390}
3391
3392void ParallelMoveResolverARM::EmitSwap(size_t index) {
3393  MoveOperands* move = moves_.Get(index);
3394  Location source = move->GetSource();
3395  Location destination = move->GetDestination();
3396
3397  if (source.IsRegister() && destination.IsRegister()) {
3398    DCHECK_NE(source.AsRegister<Register>(), IP);
3399    DCHECK_NE(destination.AsRegister<Register>(), IP);
3400    __ Mov(IP, source.AsRegister<Register>());
3401    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3402    __ Mov(destination.AsRegister<Register>(), IP);
3403  } else if (source.IsRegister() && destination.IsStackSlot()) {
3404    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3405  } else if (source.IsStackSlot() && destination.IsRegister()) {
3406    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3407  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3408    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3409  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3410    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3411    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3412    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3413  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3414    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3415                                           : destination.AsFpuRegister<SRegister>();
3416    int mem = source.IsFpuRegister()
3417        ? destination.GetStackIndex()
3418        : source.GetStackIndex();
3419
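    // Swap through IP: FP register -> IP, stack slot -> FP register, IP -> stack slot.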
3420    __ vmovrs(IP, reg);
3421    __ LoadSFromOffset(reg, SP, mem);
3422    __ StoreToOffset(kStoreWord, IP, SP, mem);
3423  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3424    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3425    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3426  } else {
3427    LOG(FATAL) << "Unimplemented move: " << source << " <-> " << destination;
3428  }
3429}
3430
3431void ParallelMoveResolverARM::SpillScratch(int reg) {
3432  __ Push(static_cast<Register>(reg));
3433}
3434
3435void ParallelMoveResolverARM::RestoreScratch(int reg) {
3436  __ Pop(static_cast<Register>(reg));
3437}
3438
3439void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3440  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3441      ? LocationSummary::kCallOnSlowPath
3442      : LocationSummary::kNoCall;
3443  LocationSummary* locations =
3444      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3445  locations->SetOut(Location::RequiresRegister());
3446}
3447
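// The referrer's class is read directly from the ArtMethod. Other classes are looked up in
// the current method's resolved-types cache; an unresolved (null) entry, or a required
// clinit check, goes through the slow path.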
3448void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
3449  Register out = cls->GetLocations()->Out().AsRegister<Register>();
3450  if (cls->IsReferrersClass()) {
3451    DCHECK(!cls->CanCallRuntime());
3452    DCHECK(!cls->MustGenerateClinitCheck());
3453    codegen_->LoadCurrentMethod(out);
3454    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3455  } else {
3456    DCHECK(cls->CanCallRuntime());
3457    codegen_->LoadCurrentMethod(out);
3458    __ LoadFromOffset(
3459        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
3460    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
3461
3462    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3463        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3464    codegen_->AddSlowPath(slow_path);
3465    __ cmp(out, ShifterOperand(0));
3466    __ b(slow_path->GetEntryLabel(), EQ);
3467    if (cls->MustGenerateClinitCheck()) {
3468      GenerateClassInitializationCheck(slow_path, out);
3469    } else {
3470      __ Bind(slow_path->GetExitLabel());
3471    }
3472  }
3473}
3474
3475void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3476  LocationSummary* locations =
3477      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3478  locations->SetInAt(0, Location::RequiresRegister());
3479  if (check->HasUses()) {
3480    locations->SetOut(Location::SameAsFirstInput());
3481  }
3482}
3483
3484void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3485  // We assume the class is not null.
3486  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3487      check->GetLoadClass(), check, check->GetDexPc(), true);
3488  codegen_->AddSlowPath(slow_path);
3489  GenerateClassInitializationCheck(slow_path,
3490                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3491}
3492
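// Branches to the slow path unless the class status is at least kStatusInitialized.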
3493void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
3494    SlowPathCodeARM* slow_path, Register class_reg) {
3495  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
3496  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
3497  __ b(slow_path->GetEntryLabel(), LT);
3498  // Even if the initialized flag is set, we may be in a situation where caches are not synced
3499  // properly. Therefore, we do a memory fence.
3500  __ dmb(ISH);
3501  __ Bind(slow_path->GetExitLabel());
3502}
3503
3504void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3505  LocationSummary* locations =
3506      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3507  locations->SetOut(Location::RequiresRegister());
3508}
3509
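// Strings are loaded from the dex cache strings of the declaring class; a null entry means
// the string is not resolved yet and the slow path calls into the runtime.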
3510void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
3511  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
3512  codegen_->AddSlowPath(slow_path);
3513
3514  Register out = load->GetLocations()->Out().AsRegister<Register>();
3515  codegen_->LoadCurrentMethod(out);
3516  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3517  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
3518  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
3519  __ cmp(out, ShifterOperand(0));
3520  __ b(slow_path->GetEntryLabel(), EQ);
3521  __ Bind(slow_path->GetExitLabel());
3522}
3523
3524void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3525  LocationSummary* locations =
3526      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3527  locations->SetOut(Location::RequiresRegister());
3528}
3529
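// Reads the pending exception from the current thread and clears it.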
3530void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3531  Register out = load->GetLocations()->Out().AsRegister<Register>();
3532  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3533  __ LoadFromOffset(kLoadWord, out, TR, offset);
3534  __ LoadImmediate(IP, 0);
3535  __ StoreToOffset(kStoreWord, IP, TR, offset);
3536}
3537
3538void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3539  LocationSummary* locations =
3540      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3541  InvokeRuntimeCallingConvention calling_convention;
3542  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3543}
3544
3545void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3546  codegen_->InvokeRuntime(
3547      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3548}
3549
3550void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3551  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3552      ? LocationSummary::kNoCall
3553      : LocationSummary::kCallOnSlowPath;
3554  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3555  locations->SetInAt(0, Location::RequiresRegister());
3556  locations->SetInAt(1, Location::RequiresRegister());
3557  locations->SetOut(Location::RequiresRegister());
3558}
3559
3560void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
3561  LocationSummary* locations = instruction->GetLocations();
3562  Register obj = locations->InAt(0).AsRegister<Register>();
3563  Register cls = locations->InAt(1).AsRegister<Register>();
3564  Register out = locations->Out().AsRegister<Register>();
3565  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3566  Label done, zero;
3567  SlowPathCodeARM* slow_path = nullptr;
3568
3569  // Return 0 if `obj` is null.
3570  // TODO: avoid this check if we know obj is not null.
3571  __ cmp(obj, ShifterOperand(0));
3572  __ b(&zero, EQ);
3573  // Compare the class of `obj` with `cls`.
3574  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
3575  __ cmp(out, ShifterOperand(cls));
3576  if (instruction->IsClassFinal()) {
3577    // Classes must be equal for the instanceof to succeed.
3578    __ b(&zero, NE);
3579    __ LoadImmediate(out, 1);
3580    __ b(&done);
3581  } else {
3582    // If the classes are not equal, we go into a slow path.
3583    DCHECK(locations->OnlyCallsOnSlowPath());
3584    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3585        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
3586    codegen_->AddSlowPath(slow_path);
3587    __ b(slow_path->GetEntryLabel(), NE);
3588    __ LoadImmediate(out, 1);
3589    __ b(&done);
3590  }
3591  __ Bind(&zero);
3592  __ LoadImmediate(out, 0);
3593  if (slow_path != nullptr) {
3594    __ Bind(slow_path->GetExitLabel());
3595  }
3596  __ Bind(&done);
3597}
3598
3599void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3600  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3601      instruction, LocationSummary::kCallOnSlowPath);
3602  locations->SetInAt(0, Location::RequiresRegister());
3603  locations->SetInAt(1, Location::RequiresRegister());
3604  locations->AddTemp(Location::RequiresRegister());
3605}
3606
3607void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
3608  LocationSummary* locations = instruction->GetLocations();
3609  Register obj = locations->InAt(0).AsRegister<Register>();
3610  Register cls = locations->InAt(1).AsRegister<Register>();
3611  Register temp = locations->GetTemp(0).AsRegister<Register>();
3612  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3613
3614  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3615      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
3616  codegen_->AddSlowPath(slow_path);
3617
3618  // TODO: avoid this check if we know obj is not null.
3619  __ cmp(obj, ShifterOperand(0));
3620  __ b(slow_path->GetExitLabel(), EQ);
3621  // Compare the class of `obj` with `cls`.
3622  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
3623  __ cmp(temp, ShifterOperand(cls));
3624  __ b(slow_path->GetEntryLabel(), NE);
3625  __ Bind(slow_path->GetExitLabel());
3626}
3627
3628void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3629  LocationSummary* locations =
3630      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3631  InvokeRuntimeCallingConvention calling_convention;
3632  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3633}
3634
3635void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3636  codegen_->InvokeRuntime(instruction->IsEnter()
3637        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3638      instruction,
3639      instruction->GetDexPc());
3640}
3641
3642void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3643void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3644void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3645
3646void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3647  LocationSummary* locations =
3648      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3649  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3650         || instruction->GetResultType() == Primitive::kPrimLong);
3651  locations->SetInAt(0, Location::RequiresRegister());
3652  locations->SetInAt(1, Location::RequiresRegister());
3653  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
3654  locations->SetOut(Location::RequiresRegister(), output_overlaps);
3655}
3656
3657void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3658  HandleBitwiseOperation(instruction);
3659}
3660
3661void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3662  HandleBitwiseOperation(instruction);
3663}
3664
3665void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3666  HandleBitwiseOperation(instruction);
3667}
3668
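// For long operands the operation is applied independently to the low and high halves of
// the register pairs; and/or/xor never carry between the halves.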
3669void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3670  LocationSummary* locations = instruction->GetLocations();
3671
3672  if (instruction->GetResultType() == Primitive::kPrimInt) {
3673    Register first = locations->InAt(0).AsRegister<Register>();
3674    Register second = locations->InAt(1).AsRegister<Register>();
3675    Register out = locations->Out().AsRegister<Register>();
3676    if (instruction->IsAnd()) {
3677      __ and_(out, first, ShifterOperand(second));
3678    } else if (instruction->IsOr()) {
3679      __ orr(out, first, ShifterOperand(second));
3680    } else {
3681      DCHECK(instruction->IsXor());
3682      __ eor(out, first, ShifterOperand(second));
3683    }
3684  } else {
3685    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3686    Location first = locations->InAt(0);
3687    Location second = locations->InAt(1);
3688    Location out = locations->Out();
3689    if (instruction->IsAnd()) {
3690      __ and_(out.AsRegisterPairLow<Register>(),
3691              first.AsRegisterPairLow<Register>(),
3692              ShifterOperand(second.AsRegisterPairLow<Register>()));
3693      __ and_(out.AsRegisterPairHigh<Register>(),
3694              first.AsRegisterPairHigh<Register>(),
3695              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3696    } else if (instruction->IsOr()) {
3697      __ orr(out.AsRegisterPairLow<Register>(),
3698             first.AsRegisterPairLow<Register>(),
3699             ShifterOperand(second.AsRegisterPairLow<Register>()));
3700      __ orr(out.AsRegisterPairHigh<Register>(),
3701             first.AsRegisterPairHigh<Register>(),
3702             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3703    } else {
3704      DCHECK(instruction->IsXor());
3705      __ eor(out.AsRegisterPairLow<Register>(),
3706             first.AsRegisterPairLow<Register>(),
3707             ShifterOperand(second.AsRegisterPairLow<Register>()));
3708      __ eor(out.AsRegisterPairHigh<Register>(),
3709             first.AsRegisterPairHigh<Register>(),
3710             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3711    }
3712  }
3713}
3714
3715}  // namespace arm
3716}  // namespace art
3717