code_generator_arm.cc revision a8ac9130b872c080299afacf5dcaab513d13ea87
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Slot (relative to SP) where the current method is stored in the frame; see
// GenerateFrameEntry, which stores R0 at SP + 0.
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers of the runtime (quick entry point) calling convention.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Note: PC appears here to mimic Quick; GenerateFrameEntry pushes LR in its
// place and GenerateFrameExit pops directly into PC to return.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
61
// Calling convention used when calling into the runtime: arguments are passed
// in R0-R3 / S0-S3 (see kRuntimeParameter* above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
73
// In the slow-path classes below, `__` emits through the visiting codegen's
// assembler; QUICK_ENTRY_POINT resolves a quick runtime entry point to its
// Thread-relative offset for 32-bit ARM.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
76
77class NullCheckSlowPathARM : public SlowPathCodeARM {
78 public:
79  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
80
81  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
82    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
83    __ Bind(GetEntryLabel());
84    arm_codegen->InvokeRuntime(
85        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
86  }
87
88 private:
89  HNullCheck* const instruction_;
90  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
91};
92
93class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
94 public:
95  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
96
97  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
98    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
99    __ Bind(GetEntryLabel());
100    arm_codegen->InvokeRuntime(
101        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
102  }
103
104 private:
105  HDivZeroCheck* const instruction_;
106  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
107};
108
109class SuspendCheckSlowPathARM : public SlowPathCodeARM {
110 public:
111  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
112      : instruction_(instruction), successor_(successor) {}
113
114  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
115    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
116    __ Bind(GetEntryLabel());
117    SaveLiveRegisters(codegen, instruction_->GetLocations());
118    arm_codegen->InvokeRuntime(
119        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
120    RestoreLiveRegisters(codegen, instruction_->GetLocations());
121    if (successor_ == nullptr) {
122      __ b(GetReturnLabel());
123    } else {
124      __ b(arm_codegen->GetLabelOf(successor_));
125    }
126  }
127
128  Label* GetReturnLabel() {
129    DCHECK(successor_ == nullptr);
130    return &return_label_;
131  }
132
133 private:
134  HSuspendCheck* const instruction_;
135  // If not null, the block to branch to after the suspend check.
136  HBasicBlock* const successor_;
137
138  // If `successor_` is null, the label to branch to after the suspend check.
139  Label return_label_;
140
141  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
142};
143
// Slow path for HBoundsCheck: passes the failing index and the array length to
// the runtime, which throws an ArrayIndexOutOfBoundsException.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // The throw entry point does not come back: no registers are saved and no
    // exit branch is emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Locations of the index and the array length at the check site.
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
175
// Slow path shared by HLoadClass and HClinitCheck: resolves the type (and
// optionally runs its static initializer) through the runtime.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime call arguments: R0 = type index, R1 = referring method.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output register must not have been clobbered by the save above.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
227
// Slow path for HLoadString: resolves the string through the runtime.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be clobbered by the register save below.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime call arguments: R0 = string index, R1 = referring method.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    // The resolved string is returned in R0; move it to the output location.
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
256
257class TypeCheckSlowPathARM : public SlowPathCodeARM {
258 public:
259  TypeCheckSlowPathARM(HInstruction* instruction,
260                       Location class_to_check,
261                       Location object_class,
262                       uint32_t dex_pc)
263      : instruction_(instruction),
264        class_to_check_(class_to_check),
265        object_class_(object_class),
266        dex_pc_(dex_pc) {}
267
268  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
269    LocationSummary* locations = instruction_->GetLocations();
270    DCHECK(instruction_->IsCheckCast()
271           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
272
273    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
274    __ Bind(GetEntryLabel());
275    SaveLiveRegisters(codegen, locations);
276
277    // We're moving two locations to locations that could overlap, so we need a parallel
278    // move resolver.
279    InvokeRuntimeCallingConvention calling_convention;
280    codegen->EmitParallelMoves(
281        class_to_check_,
282        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
283        object_class_,
284        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
285
286    if (instruction_->IsInstanceOf()) {
287      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
288      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
289    } else {
290      DCHECK(instruction_->IsCheckCast());
291      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
292    }
293
294    RestoreLiveRegisters(codegen, locations);
295    __ b(GetExitLabel());
296  }
297
298 private:
299  HInstruction* const instruction_;
300  const Location class_to_check_;
301  const Location object_class_;
302  uint32_t dex_pc_;
303
304  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
305};
306
// Switch `__` from the slow paths' codegen-relative assembler to the
// CodeGeneratorARM member-function form. (A duplicated `#undef __` that
// appeared here twice has been collapsed into one.)
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
311
312inline Condition ARMCondition(IfCondition cond) {
313  switch (cond) {
314    case kCondEQ: return EQ;
315    case kCondNE: return NE;
316    case kCondLT: return LT;
317    case kCondLE: return LE;
318    case kCondGT: return GT;
319    case kCondGE: return GE;
320    default:
321      LOG(FATAL) << "Unknown if condition";
322  }
323  return EQ;        // Unreachable.
324}
325
326inline Condition ARMOppositeCondition(IfCondition cond) {
327  switch (cond) {
328    case kCondEQ: return NE;
329    case kCondNE: return EQ;
330    case kCondLT: return GE;
331    case kCondLE: return GT;
332    case kCondGT: return LE;
333    case kCondGE: return LT;
334    default:
335      LOG(FATAL) << "Unknown if condition";
336  }
337  return EQ;        // Unreachable.
338}
339
// Pretty-print core register `reg` (used for diagnostic dumps).
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
343
// Pretty-print FP register `reg` as an S register (used for diagnostic dumps).
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
347
// Spill one core register to the SP-relative slot `stack_index`; returns the
// number of bytes consumed in the spill area.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
352
// Reload one core register from the SP-relative slot `stack_index`; returns
// the number of bytes consumed in the spill area.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
357
// Spill one S register to the SP-relative slot `stack_index`; returns the
// number of bytes consumed in the spill area.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
362
// Reload one S register from the SP-relative slot `stack_index`; returns the
// number of bytes consumed in the spill area.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
367
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    // Callee-save masks are built from the enum arrays above;
                    // the cast reads the enums as plain ints.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
389
390Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
391  switch (type) {
392    case Primitive::kPrimLong: {
393      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
394      ArmManagedRegister pair =
395          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
396      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
397      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
398
399      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
400      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
401      UpdateBlockedPairRegisters();
402      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
403    }
404
405    case Primitive::kPrimByte:
406    case Primitive::kPrimBoolean:
407    case Primitive::kPrimChar:
408    case Primitive::kPrimShort:
409    case Primitive::kPrimInt:
410    case Primitive::kPrimNot: {
411      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
412      // Block all register pairs that contain `reg`.
413      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
414        ArmManagedRegister current =
415            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
416        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
417          blocked_register_pairs_[i] = true;
418        }
419      }
420      return Location::RegisterLocation(reg);
421    }
422
423    case Primitive::kPrimFloat: {
424      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
425      return Location::FpuRegisterLocation(reg);
426    }
427
428    case Primitive::kPrimDouble: {
429      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
430      DCHECK_EQ(reg % 2, 0);
431      return Location::FpuRegisterPairLocation(reg, reg + 1);
432    }
433
434    case Primitive::kPrimVoid:
435      LOG(FATAL) << "Unreachable type " << type;
436  }
437
438  return Location();
439}
440
441void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
442  // Don't allocate the dalvik style register pair passing.
443  blocked_register_pairs_[R1_R2] = true;
444
445  // Stack register, LR and PC are always reserved.
446  blocked_core_registers_[SP] = true;
447  blocked_core_registers_[LR] = true;
448  blocked_core_registers_[PC] = true;
449
450  // Reserve thread register.
451  blocked_core_registers_[TR] = true;
452
453  // Reserve temp register.
454  blocked_core_registers_[IP] = true;
455
456  if (is_baseline) {
457    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
458      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
459    }
460
461    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
462
463    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
464      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
465    }
466  }
467
468  UpdateBlockedPairRegisters();
469}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Instruction visitor that emits ARM code; it shares the owning codegen's
// assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
// Index (0-based) of the lowest set bit in `mask`. ffs() numbers bits from 1
// (returning 0 for an empty mask), so shift its result down by one; callers
// in this file only pass non-zero masks.
static uint32_t LeastSignificantBit(uint32_t mask) {
  int one_based = ffs(mask);
  return one_based - 1;
}
491
// Compute which callee-save core/FPU registers this method will spill in its
// frame, based on the registers the allocator actually used.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every bit strictly between the lowest and highest set bits so the
    // mask describes one contiguous run of S registers.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
512
// Emit the method prologue: stack overflow probe, callee-save spills, frame
// allocation, and storing the current method at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Implicit stack overflow check: load from the address `reserved bytes`
    // below SP. On overflow this faults; RecordPcInfo ties the probe to this
    // method so the fault can be attributed (presumably by the fault handler —
    // that machinery lives elsewhere).
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is a contiguous run (see ComputeSpillMask), so one vpush
    // starting at its lowest register covers it.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  // Store R0 at SP + 0 (kCurrentMethodStackOffset); R0 is expected to hold the
  // current method on entry.
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
539
// Emit the method epilogue, undoing GenerateFrameEntry in reverse order.
// core_spill_mask_ contains PC (see kCoreCalleeSaves), so the final pop loads
// the saved LR value into PC and thereby returns.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
552
// Bind the basic block's label to the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
556
// Stack slot of a local: 64-bit values (long/double) take a double slot,
// 32-bit values a single slot; sub-word types are not expected here.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}
580
// Return the location (register, register pair, or stack slot) of the next
// argument of type `type`, advancing the visitor's core/FPU/stack cursors.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      // A long consumes two core registers (or two stack words).
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The pair must be two consecutive registers (e.g. R2_R3).
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When the float cursor sits at an even register, jump it past any
      // registers already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at the next even S-register index past any singles
      // already handed out.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
655
656Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
657  switch (type) {
658    case Primitive::kPrimBoolean:
659    case Primitive::kPrimByte:
660    case Primitive::kPrimChar:
661    case Primitive::kPrimShort:
662    case Primitive::kPrimInt:
663    case Primitive::kPrimNot: {
664      return Location::RegisterLocation(R0);
665    }
666
667    case Primitive::kPrimFloat: {
668      return Location::FpuRegisterLocation(S0);
669    }
670
671    case Primitive::kPrimLong: {
672      return Location::RegisterPairLocation(R0, R1);
673    }
674
675    case Primitive::kPrimDouble: {
676      return Location::FpuRegisterPairLocation(S0, S1);
677    }
678
679    case Primitive::kPrimVoid:
680      return Location();
681  }
682  UNREACHABLE();
683  return Location();
684}
685
// Emit a 32-bit move between any combination of core register, S register and
// stack slot. Stack-to-stack goes through the scratch register IP.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack to stack: bounce through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
719
// Emit a 64-bit move between register pairs, FPU register pairs and double
// stack slots. Register-pair-to-register-pair uses the parallel move resolver
// because the two pairs may overlap.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // ldrd needs an even/odd consecutive pair (see ExpectedPairLayout).
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not an strd-compatible pair; store the words individually.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack to stack, one word at a time via the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
773
// Materialize `instruction`'s value into `location` on behalf of `move_for`:
// constants are loaded as immediates, local loads come from their stack slot,
// temporaries from their temp location, and anything else from its output.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Load into the scratch register, then spill to the stack slot.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store the two halves of the 64-bit constant one word at a time.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the value must come from the instruction's own output
    // location, which is only valid if the move happens right after it.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
857
// Calls a Quick runtime entry point located at `entry_point_offset` in the
// Thread object (TR register) and records PC info for `instruction` at
// `dex_pc` so stack maps cover the call site.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  // Load the entry point trampoline from the current Thread and call it.
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  // Must be recorded immediately after the call so the return PC maps back
  // to this instruction.
  RecordPcInfo(instruction, dex_pc);
  // Only instructions allowed to call the runtime should reach this helper
  // in a method flagged as leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
871
// An unconditional branch has no inputs and produces no value.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
875
// Emits the branch for an HGoto, folding in any suspend check attached to a
// loop back edge or to the entry block, and eliding branches to the
// fall-through block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the check branches back to the loop
    // header itself, so no separate goto is needed.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // Entry-block suspend check (method entry) is generated here, before the
  // branch into the body.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
897
// The exit block consumes nothing and produces nothing.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block is never reached by falling through; in debug builds a
// breakpoint makes any such fall-through fail loudly.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
909
910void LocationsBuilderARM::VisitIf(HIf* if_instr) {
911  LocationSummary* locations =
912      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
913  HInstruction* cond = if_instr->InputAt(0);
914  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
915    locations->SetInAt(0, Location::RequiresRegister());
916  }
917}
918
// Emits the conditional branch for an HIf. Three cases:
//   1. constant condition: branch (or fall through) statically;
//   2. materialized condition: compare the boolean against 0;
//   3. non-materialized HCondition: emit the compare inline and branch on
//      the condition flags.
// In all cases the branch to the false successor is elided when it is the
// fall-through block.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Prefer an immediate CMP when the value is encodable; otherwise
        // stage it through the IP scratch register.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
970
971
972void LocationsBuilderARM::VisitCondition(HCondition* comp) {
973  LocationSummary* locations =
974      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
975  locations->SetInAt(0, Location::RequiresRegister());
976  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
977  if (comp->NeedsMaterialization()) {
978    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
979  }
980}
981
// Materializes a comparison result as 0/1 in the output register using a
// compare followed by a Thumb-2 IT block with two conditional moves.
// Non-materialized conditions are handled at their branch (VisitIf).
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Use an immediate operand when encodable, else stage through IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT <cond> ELSE: the next two instructions are predicated on the
  // condition and its opposite, producing 1 or 0 respectively.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1007
// Every concrete comparison flavor shares the generic HCondition handling:
// each visitor below simply delegates to VisitCondition.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1055
// Locals generate no code themselves: loads/stores through them are handled
// by the code generator's Move machinery.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // All HLocal instructions live in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1072
// A store to a local pins its value input directly to the local's stack
// slot (single or double width depending on the type), so the store itself
// needs no code.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}

// The actual move into the stack slot is emitted when the input is moved to
// its location, so there is nothing to generate here.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1100
// Constants are not materialized eagerly: each visitor records a constant
// location, and the value is synthesized at the use site (see Move).
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1155
// Returns only need to tear down the frame; a non-void return additionally
// pins its input to the calling-convention return location so no extra move
// is required here.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // The return value is forced into the location dictated by the calling
  // convention for its type (e.g. R0 or R0/R1 pair, S0, D0).
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1175
1176void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1177  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
1178                                         codegen_->GetInstructionSetFeatures());
1179  if (intrinsic.TryDispatch(invoke)) {
1180    return;
1181  }
1182
1183  HandleInvoke(invoke);
1184}
1185
// Loads the ArtMethod* of the current method into `reg`. It is spilled at a
// fixed offset (kCurrentMethodStackOffset) in the frame.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1190
1191static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1192  if (invoke->GetLocations()->Intrinsified()) {
1193    IntrinsicCodeGeneratorARM intrinsic(codegen);
1194    intrinsic.Dispatch(invoke);
1195    return true;
1196  }
1197  return false;
1198}
1199
// Emits a static or direct call, preferring intrinsic code when available.
// The first temp holds the resolved ArtMethod* for the call.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
  // Record PC info right after the call for stack-map purposes.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1210
1211void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1212  LocationSummary* locations =
1213      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1214  locations->AddTemp(Location::RegisterLocation(R0));
1215
1216  InvokeDexCallingConventionVisitor calling_convention_visitor;
1217  for (size_t i = 0; i < invoke->InputCount(); i++) {
1218    HInstruction* input = invoke->InputAt(i);
1219    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1220  }
1221
1222  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1223}
1224
1225void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1226  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
1227                                         codegen_->GetInstructionSetFeatures());
1228  if (intrinsic.TryDispatch(invoke)) {
1229    return;
1230  }
1231
1232  HandleInvoke(invoke);
1233}
1234
// Emits a virtual dispatch: load the receiver's class, fetch the method out
// of the embedded vtable, and call its quick entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1265
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  // R12 (IP) carries the interface method index expected by the IMT
  // conflict trampoline.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1271
// Emits an interface dispatch through the class's embedded IMT: load the
// receiver's class, index the IMT by (imt_index % kImtSize), and call the
// method's quick entry point, with the dex method index passed as a hidden
// argument for conflict resolution.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1304
1305void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1306  LocationSummary* locations =
1307      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1308  switch (neg->GetResultType()) {
1309    case Primitive::kPrimInt: {
1310      locations->SetInAt(0, Location::RequiresRegister());
1311      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1312      break;
1313    }
1314    case Primitive::kPrimLong: {
1315      locations->SetInAt(0, Location::RequiresRegister());
1316      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1317      break;
1318    }
1319
1320    case Primitive::kPrimFloat:
1321    case Primitive::kPrimDouble:
1322      locations->SetInAt(0, Location::RequiresFpuRegister());
1323      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1324      break;
1325
1326    default:
1327      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1328  }
1329}
1330
// Emits negation. Int uses RSB with 0; long uses an RSBS/SBC/SUB sequence
// (Thumb-2 has no RSC); float/double use VNEG.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in.
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      // (x SBC x == 0 - NOT(C), i.e. -1 when a borrow occurred, 0 otherwise.)
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1377
// Sets up locations for every supported primitive type conversion. Most
// conversions are register-to-register; float/double-to-long go through a
// runtime call, so their operands are pinned to the runtime calling
// convention.
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Any: the low word can be taken from a register pair, a double
          // stack slot, or a long constant (see the code generator).
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // The FPU temp stages the VCVT result before moving it to core.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          // Runtime call: input/output pinned to the runtime convention.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          // Runtime call: input/output pinned to the runtime convention.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // The conversion is done in generated code and needs several
          // core and FPU temporaries.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };  // NOTE(review): stray ';' after switch — harmless null statement.
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };  // NOTE(review): stray ';' after switch — harmless null statement.
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1576
1577void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1578  LocationSummary* locations = conversion->GetLocations();
1579  Location out = locations->Out();
1580  Location in = locations->InAt(0);
1581  Primitive::Type result_type = conversion->GetResultType();
1582  Primitive::Type input_type = conversion->GetInputType();
1583  DCHECK_NE(result_type, input_type);
1584  switch (result_type) {
1585    case Primitive::kPrimByte:
1586      switch (input_type) {
1587        case Primitive::kPrimShort:
1588        case Primitive::kPrimInt:
1589        case Primitive::kPrimChar:
1590          // Processing a Dex `int-to-byte' instruction.
1591          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1592          break;
1593
1594        default:
1595          LOG(FATAL) << "Unexpected type conversion from " << input_type
1596                     << " to " << result_type;
1597      }
1598      break;
1599
1600    case Primitive::kPrimShort:
1601      switch (input_type) {
1602        case Primitive::kPrimByte:
1603        case Primitive::kPrimInt:
1604        case Primitive::kPrimChar:
1605          // Processing a Dex `int-to-short' instruction.
1606          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1607          break;
1608
1609        default:
1610          LOG(FATAL) << "Unexpected type conversion from " << input_type
1611                     << " to " << result_type;
1612      }
1613      break;
1614
1615    case Primitive::kPrimInt:
1616      switch (input_type) {
1617        case Primitive::kPrimLong:
1618          // Processing a Dex `long-to-int' instruction.
1619          DCHECK(out.IsRegister());
1620          if (in.IsRegisterPair()) {
1621            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1622          } else if (in.IsDoubleStackSlot()) {
1623            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1624          } else {
1625            DCHECK(in.IsConstant());
1626            DCHECK(in.GetConstant()->IsLongConstant());
1627            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1628            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1629          }
1630          break;
1631
1632        case Primitive::kPrimFloat: {
1633          // Processing a Dex `float-to-int' instruction.
1634          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1635          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1636          __ vcvtis(temp, temp);
1637          __ vmovrs(out.AsRegister<Register>(), temp);
1638          break;
1639        }
1640
1641        case Primitive::kPrimDouble: {
1642          // Processing a Dex `double-to-int' instruction.
1643          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1644          DRegister temp_d = FromLowSToD(temp_s);
1645          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1646          __ vcvtid(temp_s, temp_d);
1647          __ vmovrs(out.AsRegister<Register>(), temp_s);
1648          break;
1649        }
1650
1651        default:
1652          LOG(FATAL) << "Unexpected type conversion from " << input_type
1653                     << " to " << result_type;
1654      }
1655      break;
1656
1657    case Primitive::kPrimLong:
1658      switch (input_type) {
1659        case Primitive::kPrimByte:
1660        case Primitive::kPrimShort:
1661        case Primitive::kPrimInt:
1662        case Primitive::kPrimChar:
1663          // Processing a Dex `int-to-long' instruction.
1664          DCHECK(out.IsRegisterPair());
1665          DCHECK(in.IsRegister());
1666          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1667          // Sign extension.
1668          __ Asr(out.AsRegisterPairHigh<Register>(),
1669                 out.AsRegisterPairLow<Register>(),
1670                 31);
1671          break;
1672
1673        case Primitive::kPrimFloat:
1674          // Processing a Dex `float-to-long' instruction.
1675          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1676                                  conversion,
1677                                  conversion->GetDexPc());
1678          break;
1679
1680        case Primitive::kPrimDouble:
1681          // Processing a Dex `double-to-long' instruction.
1682          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1683                                  conversion,
1684                                  conversion->GetDexPc());
1685          break;
1686
1687        default:
1688          LOG(FATAL) << "Unexpected type conversion from " << input_type
1689                     << " to " << result_type;
1690      }
1691      break;
1692
1693    case Primitive::kPrimChar:
1694      switch (input_type) {
1695        case Primitive::kPrimByte:
1696        case Primitive::kPrimShort:
1697        case Primitive::kPrimInt:
1698          // Processing a Dex `int-to-char' instruction.
1699          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1700          break;
1701
1702        default:
1703          LOG(FATAL) << "Unexpected type conversion from " << input_type
1704                     << " to " << result_type;
1705      }
1706      break;
1707
1708    case Primitive::kPrimFloat:
1709      switch (input_type) {
1710        case Primitive::kPrimByte:
1711        case Primitive::kPrimShort:
1712        case Primitive::kPrimInt:
1713        case Primitive::kPrimChar: {
1714          // Processing a Dex `int-to-float' instruction.
1715          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1716          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1717          break;
1718        }
1719
1720        case Primitive::kPrimLong: {
1721          // Processing a Dex `long-to-float' instruction.
1722          Register low = in.AsRegisterPairLow<Register>();
1723          Register high = in.AsRegisterPairHigh<Register>();
1724          SRegister output = out.AsFpuRegister<SRegister>();
1725          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1726          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1727          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1728          DRegister temp1_d = FromLowSToD(temp1_s);
1729          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1730          DRegister temp2_d = FromLowSToD(temp2_s);
1731
1732          // Operations use doubles for precision reasons (each 32-bit
1733          // half of a long fits in the 53-bit mantissa of a double,
1734          // but not in the 24-bit mantissa of a float).  This is
1735          // especially important for the low bits.  The result is
1736          // eventually converted to float.
1737
1738          // temp1_d = int-to-double(high)
1739          __ vmovsr(temp1_s, high);
1740          __ vcvtdi(temp1_d, temp1_s);
1741          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1742          // as an immediate value into `temp2_d` does not work, as
1743          // this instruction only transfers 8 significant bits of its
1744          // immediate operand.  Instead, use two 32-bit core
1745          // registers to load `k2Pow32EncodingForDouble` into
1746          // `temp2_d`.
1747          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1748          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1749          __ vmovdrr(temp2_d, constant_low, constant_high);
1750          // temp1_d = temp1_d * 2^32
1751          __ vmuld(temp1_d, temp1_d, temp2_d);
1752          // temp2_d = unsigned-to-double(low)
1753          __ vmovsr(temp2_s, low);
1754          __ vcvtdu(temp2_d, temp2_s);
1755          // temp1_d = temp1_d + temp2_d
1756          __ vaddd(temp1_d, temp1_d, temp2_d);
1757          // output = double-to-float(temp1_d);
1758          __ vcvtsd(output, temp1_d);
1759          break;
1760        }
1761
1762        case Primitive::kPrimDouble:
1763          // Processing a Dex `double-to-float' instruction.
1764          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1765                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1766          break;
1767
1768        default:
1769          LOG(FATAL) << "Unexpected type conversion from " << input_type
1770                     << " to " << result_type;
1771      };
1772      break;
1773
1774    case Primitive::kPrimDouble:
1775      switch (input_type) {
1776        case Primitive::kPrimByte:
1777        case Primitive::kPrimShort:
1778        case Primitive::kPrimInt:
1779        case Primitive::kPrimChar: {
1780          // Processing a Dex `int-to-double' instruction.
1781          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1782          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1783                    out.AsFpuRegisterPairLow<SRegister>());
1784          break;
1785        }
1786
1787        case Primitive::kPrimLong: {
1788          // Processing a Dex `long-to-double' instruction.
1789          Register low = in.AsRegisterPairLow<Register>();
1790          Register high = in.AsRegisterPairHigh<Register>();
1791          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1792          DRegister out_d = FromLowSToD(out_s);
1793          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1794          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1795          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1796          DRegister temp_d = FromLowSToD(temp_s);
1797
1798          // out_d = int-to-double(high)
1799          __ vmovsr(out_s, high);
1800          __ vcvtdi(out_d, out_s);
1801          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1802          // as an immediate value into `temp_d` does not work, as
1803          // this instruction only transfers 8 significant bits of its
1804          // immediate operand.  Instead, use two 32-bit core
1805          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1806          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1807          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1808          __ vmovdrr(temp_d, constant_low, constant_high);
1809          // out_d = out_d * 2^32
1810          __ vmuld(out_d, out_d, temp_d);
1811          // temp_d = unsigned-to-double(low)
1812          __ vmovsr(temp_s, low);
1813          __ vcvtdu(temp_d, temp_s);
1814          // out_d = out_d + temp_d
1815          __ vaddd(out_d, out_d, temp_d);
1816          break;
1817        }
1818
1819        case Primitive::kPrimFloat:
1820          // Processing a Dex `float-to-double' instruction.
1821          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1822                    in.AsFpuRegister<SRegister>());
1823          break;
1824
1825        default:
1826          LOG(FATAL) << "Unexpected type conversion from " << input_type
1827                     << " to " << result_type;
1828      };
1829      break;
1830
1831    default:
1832      LOG(FATAL) << "Unexpected type conversion from " << input_type
1833                 << " to " << result_type;
1834  }
1835}
1836
1837void LocationsBuilderARM::VisitAdd(HAdd* add) {
1838  LocationSummary* locations =
1839      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1840  switch (add->GetResultType()) {
1841    case Primitive::kPrimInt: {
1842      locations->SetInAt(0, Location::RequiresRegister());
1843      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1844      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1845      break;
1846    }
1847
1848    case Primitive::kPrimLong: {
1849      locations->SetInAt(0, Location::RequiresRegister());
1850      locations->SetInAt(1, Location::RequiresRegister());
1851      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1852      break;
1853    }
1854
1855    case Primitive::kPrimFloat:
1856    case Primitive::kPrimDouble: {
1857      locations->SetInAt(0, Location::RequiresFpuRegister());
1858      locations->SetInAt(1, Location::RequiresFpuRegister());
1859      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1860      break;
1861    }
1862
1863    default:
1864      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1865  }
1866}
1867
1868void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1869  LocationSummary* locations = add->GetLocations();
1870  Location out = locations->Out();
1871  Location first = locations->InAt(0);
1872  Location second = locations->InAt(1);
1873  switch (add->GetResultType()) {
1874    case Primitive::kPrimInt:
1875      if (second.IsRegister()) {
1876        __ add(out.AsRegister<Register>(),
1877               first.AsRegister<Register>(),
1878               ShifterOperand(second.AsRegister<Register>()));
1879      } else {
1880        __ AddConstant(out.AsRegister<Register>(),
1881                       first.AsRegister<Register>(),
1882                       second.GetConstant()->AsIntConstant()->GetValue());
1883      }
1884      break;
1885
1886    case Primitive::kPrimLong: {
1887      DCHECK(second.IsRegisterPair());
1888      __ adds(out.AsRegisterPairLow<Register>(),
1889              first.AsRegisterPairLow<Register>(),
1890              ShifterOperand(second.AsRegisterPairLow<Register>()));
1891      __ adc(out.AsRegisterPairHigh<Register>(),
1892             first.AsRegisterPairHigh<Register>(),
1893             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1894      break;
1895    }
1896
1897    case Primitive::kPrimFloat:
1898      __ vadds(out.AsFpuRegister<SRegister>(),
1899               first.AsFpuRegister<SRegister>(),
1900               second.AsFpuRegister<SRegister>());
1901      break;
1902
1903    case Primitive::kPrimDouble:
1904      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1905               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1906               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1907      break;
1908
1909    default:
1910      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1911  }
1912}
1913
1914void LocationsBuilderARM::VisitSub(HSub* sub) {
1915  LocationSummary* locations =
1916      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1917  switch (sub->GetResultType()) {
1918    case Primitive::kPrimInt: {
1919      locations->SetInAt(0, Location::RequiresRegister());
1920      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1921      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1922      break;
1923    }
1924
1925    case Primitive::kPrimLong: {
1926      locations->SetInAt(0, Location::RequiresRegister());
1927      locations->SetInAt(1, Location::RequiresRegister());
1928      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1929      break;
1930    }
1931    case Primitive::kPrimFloat:
1932    case Primitive::kPrimDouble: {
1933      locations->SetInAt(0, Location::RequiresFpuRegister());
1934      locations->SetInAt(1, Location::RequiresFpuRegister());
1935      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1936      break;
1937    }
1938    default:
1939      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1940  }
1941}
1942
1943void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1944  LocationSummary* locations = sub->GetLocations();
1945  Location out = locations->Out();
1946  Location first = locations->InAt(0);
1947  Location second = locations->InAt(1);
1948  switch (sub->GetResultType()) {
1949    case Primitive::kPrimInt: {
1950      if (second.IsRegister()) {
1951        __ sub(out.AsRegister<Register>(),
1952               first.AsRegister<Register>(),
1953               ShifterOperand(second.AsRegister<Register>()));
1954      } else {
1955        __ AddConstant(out.AsRegister<Register>(),
1956                       first.AsRegister<Register>(),
1957                       -second.GetConstant()->AsIntConstant()->GetValue());
1958      }
1959      break;
1960    }
1961
1962    case Primitive::kPrimLong: {
1963      DCHECK(second.IsRegisterPair());
1964      __ subs(out.AsRegisterPairLow<Register>(),
1965              first.AsRegisterPairLow<Register>(),
1966              ShifterOperand(second.AsRegisterPairLow<Register>()));
1967      __ sbc(out.AsRegisterPairHigh<Register>(),
1968             first.AsRegisterPairHigh<Register>(),
1969             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1970      break;
1971    }
1972
1973    case Primitive::kPrimFloat: {
1974      __ vsubs(out.AsFpuRegister<SRegister>(),
1975               first.AsFpuRegister<SRegister>(),
1976               second.AsFpuRegister<SRegister>());
1977      break;
1978    }
1979
1980    case Primitive::kPrimDouble: {
1981      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1982               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1983               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1984      break;
1985    }
1986
1987
1988    default:
1989      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1990  }
1991}
1992
1993void LocationsBuilderARM::VisitMul(HMul* mul) {
1994  LocationSummary* locations =
1995      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1996  switch (mul->GetResultType()) {
1997    case Primitive::kPrimInt:
1998    case Primitive::kPrimLong:  {
1999      locations->SetInAt(0, Location::RequiresRegister());
2000      locations->SetInAt(1, Location::RequiresRegister());
2001      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2002      break;
2003    }
2004
2005    case Primitive::kPrimFloat:
2006    case Primitive::kPrimDouble: {
2007      locations->SetInAt(0, Location::RequiresFpuRegister());
2008      locations->SetInAt(1, Location::RequiresFpuRegister());
2009      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2010      break;
2011    }
2012
2013    default:
2014      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2015  }
2016}
2017
2018void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
2019  LocationSummary* locations = mul->GetLocations();
2020  Location out = locations->Out();
2021  Location first = locations->InAt(0);
2022  Location second = locations->InAt(1);
2023  switch (mul->GetResultType()) {
2024    case Primitive::kPrimInt: {
2025      __ mul(out.AsRegister<Register>(),
2026             first.AsRegister<Register>(),
2027             second.AsRegister<Register>());
2028      break;
2029    }
2030    case Primitive::kPrimLong: {
2031      Register out_hi = out.AsRegisterPairHigh<Register>();
2032      Register out_lo = out.AsRegisterPairLow<Register>();
2033      Register in1_hi = first.AsRegisterPairHigh<Register>();
2034      Register in1_lo = first.AsRegisterPairLow<Register>();
2035      Register in2_hi = second.AsRegisterPairHigh<Register>();
2036      Register in2_lo = second.AsRegisterPairLow<Register>();
2037
2038      // Extra checks to protect caused by the existence of R1_R2.
2039      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
2040      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
2041      DCHECK_NE(out_hi, in1_lo);
2042      DCHECK_NE(out_hi, in2_lo);
2043
2044      // input: in1 - 64 bits, in2 - 64 bits
2045      // output: out
2046      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2047      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2048      // parts: out.lo = (in1.lo * in2.lo)[31:0]
2049
2050      // IP <- in1.lo * in2.hi
2051      __ mul(IP, in1_lo, in2_hi);
2052      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2053      __ mla(out_hi, in1_hi, in2_lo, IP);
2054      // out.lo <- (in1.lo * in2.lo)[31:0];
2055      __ umull(out_lo, IP, in1_lo, in2_lo);
2056      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2057      __ add(out_hi, out_hi, ShifterOperand(IP));
2058      break;
2059    }
2060
2061    case Primitive::kPrimFloat: {
2062      __ vmuls(out.AsFpuRegister<SRegister>(),
2063               first.AsFpuRegister<SRegister>(),
2064               second.AsFpuRegister<SRegister>());
2065      break;
2066    }
2067
2068    case Primitive::kPrimDouble: {
2069      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2070               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2071               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2072      break;
2073    }
2074
2075    default:
2076      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2077  }
2078}
2079
2080void LocationsBuilderARM::VisitDiv(HDiv* div) {
2081  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2082      ? LocationSummary::kCall
2083      : LocationSummary::kNoCall;
2084  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2085
2086  switch (div->GetResultType()) {
2087    case Primitive::kPrimInt: {
2088      locations->SetInAt(0, Location::RequiresRegister());
2089      locations->SetInAt(1, Location::RequiresRegister());
2090      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2091      break;
2092    }
2093    case Primitive::kPrimLong: {
2094      InvokeRuntimeCallingConvention calling_convention;
2095      locations->SetInAt(0, Location::RegisterPairLocation(
2096          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2097      locations->SetInAt(1, Location::RegisterPairLocation(
2098          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2099      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2100      break;
2101    }
2102    case Primitive::kPrimFloat:
2103    case Primitive::kPrimDouble: {
2104      locations->SetInAt(0, Location::RequiresFpuRegister());
2105      locations->SetInAt(1, Location::RequiresFpuRegister());
2106      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2107      break;
2108    }
2109
2110    default:
2111      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2112  }
2113}
2114
2115void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2116  LocationSummary* locations = div->GetLocations();
2117  Location out = locations->Out();
2118  Location first = locations->InAt(0);
2119  Location second = locations->InAt(1);
2120
2121  switch (div->GetResultType()) {
2122    case Primitive::kPrimInt: {
2123      __ sdiv(out.AsRegister<Register>(),
2124              first.AsRegister<Register>(),
2125              second.AsRegister<Register>());
2126      break;
2127    }
2128
2129    case Primitive::kPrimLong: {
2130      InvokeRuntimeCallingConvention calling_convention;
2131      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2132      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2133      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2134      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2135      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2136      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2137
2138      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2139      break;
2140    }
2141
2142    case Primitive::kPrimFloat: {
2143      __ vdivs(out.AsFpuRegister<SRegister>(),
2144               first.AsFpuRegister<SRegister>(),
2145               second.AsFpuRegister<SRegister>());
2146      break;
2147    }
2148
2149    case Primitive::kPrimDouble: {
2150      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2151               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2152               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2153      break;
2154    }
2155
2156    default:
2157      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2158  }
2159}
2160
2161void LocationsBuilderARM::VisitRem(HRem* rem) {
2162  Primitive::Type type = rem->GetResultType();
2163  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2164      ? LocationSummary::kNoCall
2165      : LocationSummary::kCall;
2166  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2167
2168  switch (type) {
2169    case Primitive::kPrimInt: {
2170      locations->SetInAt(0, Location::RequiresRegister());
2171      locations->SetInAt(1, Location::RequiresRegister());
2172      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2173      locations->AddTemp(Location::RequiresRegister());
2174      break;
2175    }
2176    case Primitive::kPrimLong: {
2177      InvokeRuntimeCallingConvention calling_convention;
2178      locations->SetInAt(0, Location::RegisterPairLocation(
2179          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2180      locations->SetInAt(1, Location::RegisterPairLocation(
2181          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2182      // The runtime helper puts the output in R2,R3.
2183      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2184      break;
2185    }
2186    case Primitive::kPrimFloat: {
2187      InvokeRuntimeCallingConvention calling_convention;
2188      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2189      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2190      locations->SetOut(Location::FpuRegisterLocation(S0));
2191      break;
2192    }
2193
2194    case Primitive::kPrimDouble: {
2195      InvokeRuntimeCallingConvention calling_convention;
2196      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2197          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2198      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2199          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2200      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2201      break;
2202    }
2203
2204    default:
2205      LOG(FATAL) << "Unexpected rem type " << type;
2206  }
2207}
2208
2209void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2210  LocationSummary* locations = rem->GetLocations();
2211  Location out = locations->Out();
2212  Location first = locations->InAt(0);
2213  Location second = locations->InAt(1);
2214
2215  Primitive::Type type = rem->GetResultType();
2216  switch (type) {
2217    case Primitive::kPrimInt: {
2218      Register reg1 = first.AsRegister<Register>();
2219      Register reg2 = second.AsRegister<Register>();
2220      Register temp = locations->GetTemp(0).AsRegister<Register>();
2221
2222      // temp = reg1 / reg2  (integer division)
2223      // temp = temp * reg2
2224      // dest = reg1 - temp
2225      __ sdiv(temp, reg1, reg2);
2226      __ mul(temp, temp, reg2);
2227      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2228      break;
2229    }
2230
2231    case Primitive::kPrimLong: {
2232      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2233      break;
2234    }
2235
2236    case Primitive::kPrimFloat: {
2237      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2238      break;
2239    }
2240
2241    case Primitive::kPrimDouble: {
2242      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2243      break;
2244    }
2245
2246    default:
2247      LOG(FATAL) << "Unexpected rem type " << type;
2248  }
2249}
2250
2251void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2252  LocationSummary* locations =
2253      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2254  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2255  if (instruction->HasUses()) {
2256    locations->SetOut(Location::SameAsFirstInput());
2257  }
2258}
2259
2260void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2261  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2262  codegen_->AddSlowPath(slow_path);
2263
2264  LocationSummary* locations = instruction->GetLocations();
2265  Location value = locations->InAt(0);
2266
2267  switch (instruction->GetType()) {
2268    case Primitive::kPrimInt: {
2269      if (value.IsRegister()) {
2270        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2271        __ b(slow_path->GetEntryLabel(), EQ);
2272      } else {
2273        DCHECK(value.IsConstant()) << value;
2274        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2275          __ b(slow_path->GetEntryLabel());
2276        }
2277      }
2278      break;
2279    }
2280    case Primitive::kPrimLong: {
2281      if (value.IsRegisterPair()) {
2282        __ orrs(IP,
2283                value.AsRegisterPairLow<Register>(),
2284                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2285        __ b(slow_path->GetEntryLabel(), EQ);
2286      } else {
2287        DCHECK(value.IsConstant()) << value;
2288        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2289          __ b(slow_path->GetEntryLabel());
2290        }
2291      }
2292      break;
2293    default:
2294      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2295    }
2296  }
2297}
2298
2299void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2300  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2301
2302  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2303      ? LocationSummary::kCall
2304      : LocationSummary::kNoCall;
2305  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2306
2307  switch (op->GetResultType()) {
2308    case Primitive::kPrimInt: {
2309      locations->SetInAt(0, Location::RequiresRegister());
2310      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2311      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2312      break;
2313    }
2314    case Primitive::kPrimLong: {
2315      InvokeRuntimeCallingConvention calling_convention;
2316      locations->SetInAt(0, Location::RegisterPairLocation(
2317          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2318      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2319      // The runtime helper puts the output in R0,R1.
2320      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2321      break;
2322    }
2323    default:
2324      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2325  }
2326}
2327
// Emits code for Shl/Shr/UShr: int shifts inline (masking the count to
// the valid range first), long shifts via a direct call to a quick
// runtime helper.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): this masks the count in place, clobbering the
        // `second` input register — confirm the register allocator
        // tolerates inputs being overwritten here.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift amount: mask at compile time and pick the
        // matching immediate-form instruction.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // The builder pinned operands/result to the runtime calling
      // convention; verify before calling out.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // NOTE(review): raw load+blx instead of codegen_->InvokeRuntime, so
      // no pc info is recorded — presumably the shift helpers cannot
      // throw or trigger GC; confirm.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2392
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // Register constraints are shared with Shr/UShr; see HandleShift.
  HandleShift(shl);
}
2396
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation is shared with Shr/UShr; see HandleShift.
  HandleShift(shl);
}
2400
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // Register constraints are shared with Shl/UShr; see HandleShift.
  HandleShift(shr);
}
2404
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation is shared with Shl/UShr; see HandleShift.
  HandleShift(shr);
}
2408
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // Register constraints are shared with Shl/Shr; see HandleShift.
  HandleShift(ushr);
}
2412
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation is shared with Shl/Shr; see HandleShift.
  HandleShift(ushr);
}
2416
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation always calls into the runtime, hence kCall.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Reserve the first two argument registers; the code generator fills them
  // with the type index and the current method before the call.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  // The result of the runtime call is expected in R0.
  locations->SetOut(Location::RegisterLocation(R0));
}
2425
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Set up the runtime call arguments: current method in the second argument
  // register, type index in the first.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // Dispatch to the allocation entrypoint chosen for this instruction.
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2434
void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  // Array allocation always calls into the runtime, hence kCall.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Argument registers 0 and 2 are filled by the code generator (type index
  // and current method); the array length (input 0) goes directly into
  // argument register 1.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  // The result of the runtime call is expected in R0.
  locations->SetOut(Location::RegisterLocation(R0));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
2444
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Current method in argument register 2, type index in argument register 0.
  // The array length was constrained to argument register 1 at allocation time.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // Dispatch to the allocation entrypoint chosen for this instruction.
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2453
void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters live in the caller's frame: rebase the slot
  // index past this method's own frame size.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
2465
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  // The location was computed (and frame-adjusted) in the locations builder.
  UNUSED(instruction);
}
2470
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // MVN reads its source before writing, so input and output may share a register.
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2477
// Generates bitwise not: a single MVN for int, and an MVN per register of
// the pair for long.
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2498
void LocationsBuilderARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // FP inputs are compared with vcmp; the integer result goes to a core register.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2521
// Materializes the three-way comparison into `out`: -1, 0 or 1.
// Longs compare the high words signed, then the low words unsigned.
// Floats/doubles use vcmp + vmstat; an unordered result (VS) is sent to the
// "greater" or "less" arm depending on the instruction's bias.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Load the 0 result first: vcmp/vmstat set the flags afterwards.
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Flags from the last compare select the result: EQ -> 0 (already loaded).
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2570
void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
  // Phis impose no register constraints: any location is acceptable for
  // every input and for the output.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
2579
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // No code is ever generated for a phi; reaching this visitor is a bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2584
2585void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2586  // TODO (ported from quick): revisit Arm barrier kinds
2587  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2588  switch (kind) {
2589    case MemBarrierKind::kAnyStore:
2590    case MemBarrierKind::kLoadAny:
2591    case MemBarrierKind::kAnyAny: {
2592      flavour = DmbOptions::ISH;
2593      break;
2594    }
2595    case MemBarrierKind::kStoreStore: {
2596      flavour = DmbOptions::ISHST;
2597      break;
2598    }
2599    default:
2600      LOG(FATAL) << "Unexpected memory barrier " << kind;
2601  }
2602  __ dmb(flavour);
2603}
2604
// Emits an atomic 64-bit load via LDREXD into (out_lo, out_hi).
// If `offset` is non-zero, the effective address is formed in IP first,
// using out_lo as a scratch register for the offset -- safe because out_lo
// is overwritten by the load, but it means out_lo must not alias `addr`.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2616
// Emits an atomic 64-bit store of (value_lo, value_hi) via an LDREXD/STREXD
// retry loop: STREXD only succeeds while the exclusive monitor set by the
// preceding LDREXD is still held, so on failure we loop back and retry.
// temp1/temp2 receive the (discarded) loaded value; temp1 is then reused
// for the STREXD status result. A non-zero `offset` is folded into IP,
// with temp1 as scratch.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // STREXD wrote 0 to temp1 on success, non-zero on failure: retry on failure.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2639
// Sets up register constraints for instance/static field stores.
// Extra temps are added either for the GC write barrier, or for the
// ldrexd/strexd sequence used by wide volatile stores on cores that lack
// atomic ldrd/strd.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2676
// Generates the store for an instance or static field set. Volatile stores
// are bracketed with AnyStore/AnyAny barriers. Wide (64-bit) volatile
// stores on cores without atomic ldrd/strd go through the ldrexd/strexd
// loop in GenerateWideAtomicStore; reference stores additionally mark the
// GC card.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        // Move the double into two core registers for the ldrexd/strexd loop.
        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2774
// Sets up register constraints for instance/static field loads.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Volatile longs go through GenerateWideAtomicLoad, which writes out_lo
  // before the load when materializing the offset, so the output pair must
  // not alias the base register.
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2798
// Generates the load for an instance or static field get. Wide (64-bit)
// volatile loads on cores without atomic ldrd/strd use LDREXD via
// GenerateWideAtomicLoad; volatile loads are followed by a LoadAny barrier.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // NOTE(review): the implicit null check below is recorded after the
        // whole ldrexd sequence on this path -- confirm the recorded PC
        // covers the faulting instruction.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into two core temps, then move into the D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2883
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field sets; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2887
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field sets; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2891
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field gets; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2895
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field gets; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2899
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field gets; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2903
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field gets; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2907
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field sets; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2911
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field sets; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2915
void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // If the checked value is used afterwards, it flows through unchanged.
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2924
// Implicit null check: load from the object at offset 0 into IP so that a
// null reference faults, and record the PC so the fault can be mapped back
// to this check. Skipped entirely when the check can be folded into a
// subsequent user access.
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2934
// Explicit null check: compare the reference against zero and branch to a
// slow path that raises the exception when it is null.
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2945
void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
  // Compiler options select between the fault-based and the compare-and-branch check.
  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}
2953
void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // A constant index is folded into the load offset; otherwise it needs a register.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2961
// Generates the array element load. For each element type, a constant index
// is scaled and folded into the load offset; a register index is scaled by
// the element size and added to the array base via IP first.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: index needs no scaling.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References and ints share the word-sized load.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // NOTE(review): recorded after the last emitted load for every element
  // type -- confirm the PC covers the faulting instruction for wide loads.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3088
// Sets up register constraints for array element stores. Stores that need a
// type check go through the runtime (fixed argument registers); otherwise
// the store is inlined, with two extra temps when a GC write barrier is
// required.
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
3115
// Emits the code for an array store. For object stores that need a type
// check, the whole operation is delegated to the pAputObject entrypoint;
// otherwise the element address is either folded into an immediate offset
// (constant index) or computed into IP (register index).
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 1-byte elements: byte store.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 2-byte elements: halfword store, index scaled by 2.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        // 4-byte elements: word store, index scaled by 4.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the null check here for int/object stores: this path has its
        // own record (the tail of the function skips these two types).
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Reference stores dirty the GC card of the holding object.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // A type check is required: let the runtime perform the full store.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 8-byte elements: word-pair store from the low register of the pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Single-precision FPU store.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Double-precision store via the D register overlapping the S pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3239
3240void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3241  LocationSummary* locations =
3242      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3243  locations->SetInAt(0, Location::RequiresRegister());
3244  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3245}
3246
3247void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3248  LocationSummary* locations = instruction->GetLocations();
3249  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3250  Register obj = locations->InAt(0).AsRegister<Register>();
3251  Register out = locations->Out().AsRegister<Register>();
3252  __ LoadFromOffset(kLoadWord, out, obj, offset);
3253  codegen_->MaybeRecordImplicitNullCheck(instruction);
3254}
3255
3256void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3257  LocationSummary* locations =
3258      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3259  locations->SetInAt(0, Location::RequiresRegister());
3260  locations->SetInAt(1, Location::RequiresRegister());
3261  if (instruction->HasUses()) {
3262    locations->SetOut(Location::SameAsFirstInput());
3263  }
3264}
3265
3266void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3267  LocationSummary* locations = instruction->GetLocations();
3268  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3269      instruction, locations->InAt(0), locations->InAt(1));
3270  codegen_->AddSlowPath(slow_path);
3271
3272  Register index = locations->InAt(0).AsRegister<Register>();
3273  Register length = locations->InAt(1).AsRegister<Register>();
3274
3275  __ cmp(index, ShifterOperand(length));
3276  __ b(slow_path->GetEntryLabel(), CS);
3277}
3278
// Dirties the card-table entry covering `object` after a reference store, so
// the GC knows the object may now point at `value`. Storing null creates no
// reference, so the marking is skipped in that case.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card = card-table pointer cached in the Thread object.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp = index of the card covering `object`.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Write the low byte of the card-table base into the entry; presumably the
  // base is chosen so its low byte equals the dirty value, saving a register.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3287
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no locations of their own.
  temp->SetLocations(nullptr);
}
3291
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator: no code is emitted
  // for a temporary itself.
  UNUSED(temp);
}
3296
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  // Parallel moves are materialized after the locations-building pass, so
  // this visitor must never be reached.
  LOG(FATAL) << "Unreachable";
}
3301
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  // Delegate emission to the move resolver, which orders/breaks move cycles.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3305
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No inputs or outputs; only a slow path may be taken.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3309
3310void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3311  HBasicBlock* block = instruction->GetBlock();
3312  if (block->GetLoopInformation() != nullptr) {
3313    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3314    // The back edge will generate the suspend check.
3315    return;
3316  }
3317  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3318    // The goto will generate the suspend check.
3319    return;
3320  }
3321  GenerateSuspendCheck(instruction, nullptr);
3322}
3323
// Emits a suspend check. With a null `successor` the check is in-place (fall
// through when no suspension is requested); with a non-null `successor` it
// doubles as the branch of a back edge.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Test the thread's flags halfword; non-zero means a request is pending.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Enter the slow path only when flags are set; it returns right here.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Jump to the successor when clear, otherwise enter the slow path.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3342
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  // The resolver emits through the code generator's assembler.
  return codegen_->GetAssembler();
}
3346
// Emits the move at `index` of the parallel move being resolved. Dispatches
// on the (source, destination) location kinds; IP and DTMP serve as scratch
// for memory-to-memory moves and for materializing wide constants.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register -> core register or stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // Stack slot -> core register, FPU register, or stack slot (via IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // Single-precision register -> register or stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot -> stack slot (via DTMP), core pair, or FPU pair.
    if (destination.IsDoubleStackSlot()) {
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair -> register pair or 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // Double-precision register -> register or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant -> register(s) or stack slot(s), materialized by type.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Store the two halves one word at a time through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the raw bit pattern of the double, one word at a time.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3461
// Swaps a core register with a stack slot, using IP as scratch.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3467
// Swaps two stack slots, using IP plus one allocated scratch register.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // If the scratch register had to be spilled, the push moved SP down one
  // word, so both slot offsets must be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3478
// Emits the swap at `index` of the parallel move being resolved (used to
// break move cycles). IP and DTMP serve as the scratch registers.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core <-> core, three moves through IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Single-precision <-> single-precision, through IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core pair <-> core pair: park one pair in DTMP while moving the other.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> 64-bit stack slot, parking the pair in DTMP.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // Double <-> double, through DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // Double <-> 64-bit stack slot, through DTMP.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // Single-precision <-> stack slot, through IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot <-> 64-bit slot, one word at a time.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3551
void ParallelMoveResolverARM::SpillScratch(int reg) {
  // Preserve a register claimed as scratch by pushing it on the stack.
  __ Push(static_cast<Register>(reg));
}
3555
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  // Counterpart of SpillScratch: pop the preserved value back.
  __ Pop(static_cast<Register>(reg));
}
3559
3560void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3561  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3562      ? LocationSummary::kCallOnSlowPath
3563      : LocationSummary::kNoCall;
3564  LocationSummary* locations =
3565      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3566  locations->SetOut(Location::RequiresRegister());
3567}
3568
// Loads a class reference into the output register, either straight from the
// current method (referrer's class) or via the dex cache with a slow path
// for unresolved or uninitialized classes.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // The class is the current method's declaring class: one load away.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current method -> resolved-types cache -> entry for the type index.
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // A null cache entry means the class is unresolved: go to the slow path.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check shares the same slow path and binds its exit.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3595
3596void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3597  LocationSummary* locations =
3598      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3599  locations->SetInAt(0, Location::RequiresRegister());
3600  if (check->HasUses()) {
3601    locations->SetOut(Location::SameAsFirstInput());
3602  }
3603}
3604
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  // The slow path (re)uses the class-loading slow path with the clinit flag set.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
3613
// Branches to `slow_path` if the class in `class_reg` is not yet initialized
// (status < kStatusInitialized); on the initialized path, issues a memory
// barrier before continuing.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3624
3625void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3626  LocationSummary* locations =
3627      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3628  locations->SetOut(Location::RequiresRegister());
3629}
3630
// Loads a string reference from the dex cache; a null entry means the string
// is unresolved and is handled by the slow path.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current method -> declaring class -> string cache -> entry.
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3644
3645void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3646  LocationSummary* locations =
3647      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3648  locations->SetOut(Location::RequiresRegister());
3649}
3650
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  // Load the pending exception from the Thread object...
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // ...then clear the slot, marking the exception as taken.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3658
3659void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3660  LocationSummary* locations =
3661      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3662  InvokeRuntimeCallingConvention calling_convention;
3663  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3664}
3665
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  // Exception delivery is handled entirely by the runtime.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3670
3671void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3672  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3673      ? LocationSummary::kNoCall
3674      : LocationSummary::kCallOnSlowPath;
3675  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3676  locations->SetInAt(0, Location::RequiresRegister());
3677  locations->SetInAt(1, Location::RequiresRegister());
3678  // The out register is used as a temporary, so it overlaps with the inputs.
3679  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3680}
3681
// Computes `out = obj instanceof cls`. Final classes need only an exact class
// comparison; otherwise an inexact match falls back to a type-check slow path.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // `out` = 0 on the null / definite-mismatch paths.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3720
3721void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3722  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3723      instruction, LocationSummary::kCallOnSlowPath);
3724  locations->SetInAt(0, Location::RequiresRegister());
3725  locations->SetInAt(1, Location::RequiresRegister());
3726  locations->AddTemp(Location::RequiresRegister());
3727}
3728
// Emits a checkcast: null always passes, an exact class match passes inline,
// and anything else is resolved in the type-check slow path.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // Null references always pass the cast: branch straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3749
3750void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3751  LocationSummary* locations =
3752      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3753  InvokeRuntimeCallingConvention calling_convention;
3754  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3755}
3756
3757void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3758  codegen_->InvokeRuntime(instruction->IsEnter()
3759        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3760      instruction,
3761      instruction->GetDexPc());
3762}
3763
// The three bitwise operations share one locations-building helper.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3767
3768void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3769  LocationSummary* locations =
3770      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3771  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3772         || instruction->GetResultType() == Primitive::kPrimLong);
3773  locations->SetInAt(0, Location::RequiresRegister());
3774  locations->SetInAt(1, Location::RequiresRegister());
3775  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3776}
3777
// The three bitwise operations share one code-generation helper.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3789
3790void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3791  LocationSummary* locations = instruction->GetLocations();
3792
3793  if (instruction->GetResultType() == Primitive::kPrimInt) {
3794    Register first = locations->InAt(0).AsRegister<Register>();
3795    Register second = locations->InAt(1).AsRegister<Register>();
3796    Register out = locations->Out().AsRegister<Register>();
3797    if (instruction->IsAnd()) {
3798      __ and_(out, first, ShifterOperand(second));
3799    } else if (instruction->IsOr()) {
3800      __ orr(out, first, ShifterOperand(second));
3801    } else {
3802      DCHECK(instruction->IsXor());
3803      __ eor(out, first, ShifterOperand(second));
3804    }
3805  } else {
3806    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3807    Location first = locations->InAt(0);
3808    Location second = locations->InAt(1);
3809    Location out = locations->Out();
3810    if (instruction->IsAnd()) {
3811      __ and_(out.AsRegisterPairLow<Register>(),
3812              first.AsRegisterPairLow<Register>(),
3813              ShifterOperand(second.AsRegisterPairLow<Register>()));
3814      __ and_(out.AsRegisterPairHigh<Register>(),
3815              first.AsRegisterPairHigh<Register>(),
3816              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3817    } else if (instruction->IsOr()) {
3818      __ orr(out.AsRegisterPairLow<Register>(),
3819             first.AsRegisterPairLow<Register>(),
3820             ShifterOperand(second.AsRegisterPairLow<Register>()));
3821      __ orr(out.AsRegisterPairHigh<Register>(),
3822             first.AsRegisterPairHigh<Register>(),
3823             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3824    } else {
3825      DCHECK(instruction->IsXor());
3826      __ eor(out.AsRegisterPairLow<Register>(),
3827             first.AsRegisterPairLow<Register>(),
3828             ShifterOperand(second.AsRegisterPairLow<Register>()));
3829      __ eor(out.AsRegisterPairHigh<Register>(),
3830             first.AsRegisterPairHigh<Register>(),
3831             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3832    }
3833  }
3834}
3835
// Emits the call sequence for a static or direct invoke. `temp` must be the
// ART method register; it is clobbered by the method lookup below.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // Call the resolved method's quick-compiled entrypoint through LR.
    __ blx(LR);
  } else {
    // Recursive call: branch directly to this method's own frame entry,
    // skipping the dex-cache lookup entirely.
    __ bl(GetFrameEntryLabel());
  }

  DCHECK(!IsLeafMethod());
}
3867
3868void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
3869  // Nothing to do, this should be removed during prepare for register allocator.
3870  UNUSED(instruction);
3871  LOG(FATAL) << "Unreachable";
3872}
3873
3874void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
3875  // Nothing to do, this should be removed during prepare for register allocator.
3876  UNUSED(instruction);
3877  LOG(FATAL) << "Unreachable";
3878}
3879
3880}  // namespace arm
3881}  // namespace art
3882