code_generator_arm.cc revision 09895ebf2d98783e65930a820e9288703bb1a50b
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Offset at which the current method is stored in the frame
// (GenerateFrameEntry stores R0 at SP + 0).
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into the runtime
// (see InvokeRuntimeCallingConvention below).
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-save registers. PC is listed to mimic Quick's frame layout;
// LR is pushed in its slot on entry (see GenerateFrameEntry).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
61
// Calling convention used for calls into the runtime: arguments are passed
// in R0-R3 and S0-S3 (kRuntimeParameter*Registers above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
73
// In the slow path classes below, `__` emits through the `codegen` argument's
// assembler; it is redefined further down for CodeGeneratorARM members.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Byte offset of a quick runtime entry point, loaded relative to TR
// (see CodeGeneratorARM::InvokeRuntime).
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
76
// Slow path that calls the runtime to throw a null pointer exception.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The throw entry point does not come back here, so no exit branch is
    // emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  // The null check that triggered this slow path.
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
92
// Slow path that calls the runtime to throw a division-by-zero error.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The throw entry point does not come back here, so no exit branch is
    // emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  // The div-zero check that triggered this slow path.
  HDivZeroCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
108
// Slow path for HSuspendCheck: saves live registers, calls pTestSuspend,
// restores live registers, then resumes either at the return label or at
// the given successor block.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call can suspend this thread; live register values must
    // be preserved across it.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  // The suspend check that triggered this slow path.
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
143
// Slow path that calls the runtime to throw an array bounds error, passing
// the offending index and the array length as arguments.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // The throw entry point does not come back here, so no exit branch is
    // emitted.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  // The bounds check that triggered this slow path.
  HBoundsCheck* const instruction_;
  // Location of the out-of-range index.
  const Location index_location_;
  // Location of the array length.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
175
// Slow path that calls the runtime to resolve a type or to initialize a
// class' static storage (when `do_clinit_` is set), then moves the result
// into the instruction's output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Entry point arguments: (type index, current method).
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not be in the live set: RestoreLiveRegisters below
      // would otherwise clobber the result we move into it.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
227
// Slow path that calls the runtime to resolve a string, then moves the
// result (returned in R0) into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output must not be in the live set: RestoreLiveRegisters below
    // would otherwise clobber the result we move into it.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Entry point arguments: (string index, current method).
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The load-string that triggered this slow path.
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
256
// Slow path shared by instance-of (calls pInstanceofNonTrivial and stores
// the result) and check-cast (calls pCheckCast).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output must not be in the live set:
    // RestoreLiveRegisters below would otherwise clobber the result.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      // The runtime returns the result in R0.
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The instance-of or check-cast that triggered this slow path.
  HInstruction* const instruction_;
  // Location of the class to check against.
  const Location class_to_check_;
  // Location of the object's actual class.
  const Location object_class_;
  // The dex PC of `instruction_`.
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
306
#undef __
// From here on, `__` emits through the code generator's own assembler
// (GetAssembler() on `this`) instead of a slow path's `codegen` argument.
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
311
312inline Condition ARMCondition(IfCondition cond) {
313  switch (cond) {
314    case kCondEQ: return EQ;
315    case kCondNE: return NE;
316    case kCondLT: return LT;
317    case kCondLE: return LE;
318    case kCondGT: return GT;
319    case kCondGE: return GE;
320    default:
321      LOG(FATAL) << "Unknown if condition";
322  }
323  return EQ;        // Unreachable.
324}
325
326inline Condition ARMOppositeCondition(IfCondition cond) {
327  switch (cond) {
328    case kCondEQ: return NE;
329    case kCondNE: return EQ;
330    case kCondLT: return GE;
331    case kCondLE: return GT;
332    case kCondGT: return LE;
333    case kCondGE: return LT;
334    default:
335      LOG(FATAL) << "Unknown if condition";
336  }
337  return EQ;        // Unreachable.
338}
339
// Prints the name of core register `reg` into `stream` (debug output).
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
343
// Prints the name of S register `reg` into `stream` (debug output).
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
347
// Spills core register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed in the spill area.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
352
// Reloads core register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed in the spill area.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
357
// Spills S register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed in the spill area.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
362
// Reloads S register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed in the spill area.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
367
// Sets up the ARM code generator: register counts, callee-save masks, and
// the helper objects (location builder, instruction visitor, move resolver,
// assembler).
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
389
// Finds, blocks and returns a free register (or aligned register pair)
// suitable for a value of the given `type`.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block the two core registers backing the pair, then refresh the
      // pair-blocking table so overlapping pairs become unavailable too.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting at an even index.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
440
// Marks the registers that the register allocator must never hand out.
// With `is_baseline`, callee-saves are additionally blocked, except for
// kCoreSavedRegisterForBaseline.
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // R5 stays available so baseline can still handle long operations.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate the blocked core registers to the pair table.
  UpdateBlockedPairRegisters();
}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Instruction visitor that emits ARM code; it shares the assembler owned
// by `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
// Returns the zero-based index of the lowest set bit in `mask`.
static uint32_t LeastSignificantBit(uint32_t mask) {
  // ffs numbers bits starting at 1, so shift down to a zero-based index.
  const int one_based_index = ffs(mask);
  return one_based_index - 1;
}
491
// Computes the core and FPU spill masks from the registers the allocator
// actually used, restricted to the callee-save sets.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every interior bit so the vpush/vpop register range is contiguous.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
512
// Emits the method prologue: optional stack overflow probe, callee-save
// pushes, frame allocation, and storing the current method (R0) at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Probe the stack by loading from [SP - reserved bytes]; with implicit
    // stack overflow checks this faults if the limit has been crossed.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is contiguous (see ComputeSpillMask), so one vpush
    // covers the whole range.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Allocate the rest of the frame and store R0 (the current method) at
  // offset 0 (see kCurrentMethodStackOffset).
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
539
// Emits the method epilogue. Popping core_spill_mask_ — which includes PC —
// loads the return address that GenerateFrameEntry pushed as LR, returning
// to the caller.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
552
// Binds `block`'s label to the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
556
557Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
558  switch (load->GetType()) {
559    case Primitive::kPrimLong:
560    case Primitive::kPrimDouble:
561      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
562      break;
563
564    case Primitive::kPrimInt:
565    case Primitive::kPrimNot:
566    case Primitive::kPrimFloat:
567      return Location::StackSlot(GetStackSlot(load->GetLocal()));
568
569    case Primitive::kPrimBoolean:
570    case Primitive::kPrimByte:
571    case Primitive::kPrimChar:
572    case Primitive::kPrimShort:
573    case Primitive::kPrimVoid:
574      LOG(FATAL) << "Unexpected type " << load->GetType();
575  }
576
577  LOG(FATAL) << "Unreachable";
578  return Location();
579}
580
// Returns the location (core register/pair, S register/pair, or stack slot)
// of the next argument of the given `type` under the ARM managed calling
// convention, advancing the visitor's gp/fpu/stack cursors as it goes.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit values take the next core register, else a stack slot.
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs take an adjacent core register pair, else a double stack slot.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // An even float_index_ means there is no odd single-register gap left
      // behind by double alignment, so catch up past the registers doubles
      // have consumed.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of S registers.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
655
656Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
657  switch (type) {
658    case Primitive::kPrimBoolean:
659    case Primitive::kPrimByte:
660    case Primitive::kPrimChar:
661    case Primitive::kPrimShort:
662    case Primitive::kPrimInt:
663    case Primitive::kPrimNot: {
664      return Location::RegisterLocation(R0);
665    }
666
667    case Primitive::kPrimFloat: {
668      return Location::FpuRegisterLocation(S0);
669    }
670
671    case Primitive::kPrimLong: {
672      return Location::RegisterPairLocation(R0, R1);
673    }
674
675    case Primitive::kPrimDouble: {
676      return Location::FpuRegisterPairLocation(S0, S1);
677    }
678
679    case Primitive::kPrimVoid:
680      return Location();
681  }
682  UNREACHABLE();
683  return Location();
684}
685
// Emits a 32-bit move between any combination of core register, S register
// and stack slot. Stack-to-stack goes through the IP scratch register.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce the word through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
719
// Emits a 64-bit move between core register pairs, FPU register pairs and
// double stack slots. Combinations not produced by the code generator are
// left UNIMPLEMENTED.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The halves of the two pairs may overlap; the parallel move resolver
      // orders (or swaps) the word moves safely.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Destination pairs are even/odd aligned, so one word-pair load works.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not an aligned pair (see ExpectedPairLayout); store the
        // two words individually.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move the two words via the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
773
// Moves the value produced by `instruction` into `location`, materializing
// constants directly and routing 32- vs 64-bit values through Move32/Move64.
// `move_for` is the instruction that requested the move.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    // Nothing to do: the value already lives at the requested location.
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize the constant directly into the target location.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store the two halves through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Load from the local's stack slot, sized by the value's type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
857
// Calls a quick runtime entrypoint: loads the entrypoint address from the
// thread register (TR) at `entry_point_offset`, branches to it through LR,
// and records the PC mapping for `instruction` at `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only instructions expected to call into the runtime (or non-leaf
  // methods) should reach this point.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
871
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  // An unconditional branch has no inputs or outputs to allocate.
  got->SetLocations(nullptr);
}
875
// Emits an unconditional branch to the successor, folding in any pending
// suspend check and eliding the branch when the successor is the next block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  // On a loop back edge with a suspend check, the suspend check emits the
  // branch back to the loop header itself.
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // Method-entry suspend check: generated here, with no successor fallthrough.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
897
void LocationsBuilderARM::VisitExit(HExit* exit) {
  // The exit instruction has no inputs or outputs to allocate.
  exit->SetLocations(nullptr);
}
901
902void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
903  UNUSED(exit);
904  if (kIsDebugBuild) {
905    __ Comment("Unreachable");
906    __ bkpt(0);
907  }
908}
909
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // A register input is only needed when the condition is materialized
  // (or is not an HCondition at all); otherwise the comparison is emitted
  // inline by VisitIf's code generation.
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
918
// Emits a conditional branch. Three cases: a constant condition (branch is
// resolved statically), a materialized condition (compare its 0/1 output
// against 0), or a non-materialized condition (emit the comparison inline
// and branch on the flags).
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Prefer an immediate CMP; fall back to loading the constant in IP
        // when it cannot be encoded as a shifter operand.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Fall through or branch to the false successor.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
970
971
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // Only allocate an output register when the 0/1 result must actually be
  // materialized; otherwise the user consumes the flags directly.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
981
// Materializes a comparison result as 0/1 in the output register.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // Non-materialized conditions are folded into their user (e.g. HIf).
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      // Constant not encodable as a shifter operand; load it into IP first.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Thumb-2 IT block: move 1 on the condition, 0 on its opposite.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1007
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1011
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1015
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1019
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1023
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1027
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1031
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1035
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1039
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1043
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1047
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Shares the generic HCondition location handling.
  VisitCondition(comp);
}
1051
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Shares the generic HCondition code generation.
  VisitCondition(comp);
}
1055
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  // A local declaration has no inputs or outputs to allocate.
  local->SetLocations(nullptr);
}
1059
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // No code to emit; locals are only expected in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1063
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  // No locations needed; the load is resolved by CodeGeneratorARM::Move.
  load->SetLocations(nullptr);
}
1067
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1072
// Pins the stored value's input to the local's stack slot (single or double
// width depending on the value's type); the store itself then needs no code.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1096
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do; the input was constrained to the local's slot directly.
  UNUSED(store);
}
1100
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // The constant is its own location; users materialize it on demand.
  locations->SetOut(Location::ConstantLocation(constant));
}
1106
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1111
void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // The constant is its own location; users materialize it on demand.
  locations->SetOut(Location::ConstantLocation(constant));
}
1117
void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1122
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // The constant is its own location; users materialize it on demand.
  locations->SetOut(Location::ConstantLocation(constant));
}
1128
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1133
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // The constant is its own location; users materialize it on demand.
  locations->SetOut(Location::ConstantLocation(constant));
}
1139
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1144
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // The constant is its own location; users materialize it on demand.
  locations->SetOut(Location::ConstantLocation(constant));
}
1150
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1155
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  // A void return has no inputs or outputs to allocate.
  ret->SetLocations(nullptr);
}
1159
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
}
1164
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the returned value to the ABI return location for its type.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1170
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  // The value is already in the return location; just exit the frame.
  codegen_->GenerateFrameExit();
}
1175
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Let the intrinsics framework claim the invoke first; it sets up its own
  // locations when it recognizes the callee.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1185
// Loads the current ArtMethod* (stored at the bottom of the frame) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1190
1191static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1192  if (invoke->GetLocations()->Intrinsified()) {
1193    IntrinsicCodeGeneratorARM intrinsic(codegen);
1194    intrinsic.Dispatch(invoke);
1195    return true;
1196  }
1197  return false;
1198}
1199
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Intrinsified invokes emit their own code and skip the call sequence.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  // Temp 0 (allocated in HandleInvoke) carries the resolved method.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
}
1209
1210void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1211  LocationSummary* locations =
1212      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1213  locations->AddTemp(Location::RegisterLocation(R0));
1214
1215  InvokeDexCallingConventionVisitor calling_convention_visitor;
1216  for (size_t i = 0; i < invoke->InputCount(); i++) {
1217    HInstruction* input = invoke->InputAt(i);
1218    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1219  }
1220
1221  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1222}
1223
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Let the intrinsics framework claim the invoke first; it sets up its own
  // locations when it recognizes the callee.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1233
// Emits a virtual dispatch: load the receiver's class, fetch the method
// from the embedded vtable, and call through its quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // NOTE: the class load above is expected to double as the receiver null
  // check when implicit null checks are in use.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1264
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method index, passed in R12).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1270
// Emits an interface dispatch: set the hidden dex-method-index argument,
// load the receiver's class, fetch the method from the embedded IMT, and
// call through its quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // IMT slots are shared: the index is taken modulo the table size.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // NOTE: the class load above is expected to double as the receiver null
  // check when implicit null checks are in use.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1303
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The generated sequence writes out.lo before it reads in.hi, so the
      // output may not share registers with the input.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1329
// Emits arithmetic negation for int, long, float and double operands.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in.
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1376
1377void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1378  Primitive::Type result_type = conversion->GetResultType();
1379  Primitive::Type input_type = conversion->GetInputType();
1380  DCHECK_NE(result_type, input_type);
1381
1382  // The float-to-long and double-to-long type conversions rely on a
1383  // call to the runtime.
1384  LocationSummary::CallKind call_kind =
1385      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1386       && result_type == Primitive::kPrimLong)
1387      ? LocationSummary::kCall
1388      : LocationSummary::kNoCall;
1389  LocationSummary* locations =
1390      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1391
1392  switch (result_type) {
1393    case Primitive::kPrimByte:
1394      switch (input_type) {
1395        case Primitive::kPrimShort:
1396        case Primitive::kPrimInt:
1397        case Primitive::kPrimChar:
1398          // Processing a Dex `int-to-byte' instruction.
1399          locations->SetInAt(0, Location::RequiresRegister());
1400          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1401          break;
1402
1403        default:
1404          LOG(FATAL) << "Unexpected type conversion from " << input_type
1405                     << " to " << result_type;
1406      }
1407      break;
1408
1409    case Primitive::kPrimShort:
1410      switch (input_type) {
1411        case Primitive::kPrimByte:
1412        case Primitive::kPrimInt:
1413        case Primitive::kPrimChar:
1414          // Processing a Dex `int-to-short' instruction.
1415          locations->SetInAt(0, Location::RequiresRegister());
1416          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1417          break;
1418
1419        default:
1420          LOG(FATAL) << "Unexpected type conversion from " << input_type
1421                     << " to " << result_type;
1422      }
1423      break;
1424
1425    case Primitive::kPrimInt:
1426      switch (input_type) {
1427        case Primitive::kPrimLong:
1428          // Processing a Dex `long-to-int' instruction.
1429          locations->SetInAt(0, Location::Any());
1430          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1431          break;
1432
1433        case Primitive::kPrimFloat:
1434          // Processing a Dex `float-to-int' instruction.
1435          locations->SetInAt(0, Location::RequiresFpuRegister());
1436          locations->SetOut(Location::RequiresRegister());
1437          locations->AddTemp(Location::RequiresFpuRegister());
1438          break;
1439
1440        case Primitive::kPrimDouble:
1441          // Processing a Dex `double-to-int' instruction.
1442          locations->SetInAt(0, Location::RequiresFpuRegister());
1443          locations->SetOut(Location::RequiresRegister());
1444          locations->AddTemp(Location::RequiresFpuRegister());
1445          break;
1446
1447        default:
1448          LOG(FATAL) << "Unexpected type conversion from " << input_type
1449                     << " to " << result_type;
1450      }
1451      break;
1452
1453    case Primitive::kPrimLong:
1454      switch (input_type) {
1455        case Primitive::kPrimByte:
1456        case Primitive::kPrimShort:
1457        case Primitive::kPrimInt:
1458        case Primitive::kPrimChar:
1459          // Processing a Dex `int-to-long' instruction.
1460          locations->SetInAt(0, Location::RequiresRegister());
1461          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1462          break;
1463
1464        case Primitive::kPrimFloat: {
1465          // Processing a Dex `float-to-long' instruction.
1466          InvokeRuntimeCallingConvention calling_convention;
1467          locations->SetInAt(0, Location::FpuRegisterLocation(
1468              calling_convention.GetFpuRegisterAt(0)));
1469          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1470          break;
1471        }
1472
1473        case Primitive::kPrimDouble: {
1474          // Processing a Dex `double-to-long' instruction.
1475          InvokeRuntimeCallingConvention calling_convention;
1476          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1477              calling_convention.GetFpuRegisterAt(0),
1478              calling_convention.GetFpuRegisterAt(1)));
1479          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1480          break;
1481        }
1482
1483        default:
1484          LOG(FATAL) << "Unexpected type conversion from " << input_type
1485                     << " to " << result_type;
1486      }
1487      break;
1488
1489    case Primitive::kPrimChar:
1490      switch (input_type) {
1491        case Primitive::kPrimByte:
1492        case Primitive::kPrimShort:
1493        case Primitive::kPrimInt:
1494          // Processing a Dex `int-to-char' instruction.
1495          locations->SetInAt(0, Location::RequiresRegister());
1496          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1497          break;
1498
1499        default:
1500          LOG(FATAL) << "Unexpected type conversion from " << input_type
1501                     << " to " << result_type;
1502      }
1503      break;
1504
1505    case Primitive::kPrimFloat:
1506      switch (input_type) {
1507        case Primitive::kPrimByte:
1508        case Primitive::kPrimShort:
1509        case Primitive::kPrimInt:
1510        case Primitive::kPrimChar:
1511          // Processing a Dex `int-to-float' instruction.
1512          locations->SetInAt(0, Location::RequiresRegister());
1513          locations->SetOut(Location::RequiresFpuRegister());
1514          break;
1515
1516        case Primitive::kPrimLong:
1517          // Processing a Dex `long-to-float' instruction.
1518          locations->SetInAt(0, Location::RequiresRegister());
1519          locations->SetOut(Location::RequiresFpuRegister());
1520          locations->AddTemp(Location::RequiresRegister());
1521          locations->AddTemp(Location::RequiresRegister());
1522          locations->AddTemp(Location::RequiresFpuRegister());
1523          locations->AddTemp(Location::RequiresFpuRegister());
1524          break;
1525
1526        case Primitive::kPrimDouble:
1527          // Processing a Dex `double-to-float' instruction.
1528          locations->SetInAt(0, Location::RequiresFpuRegister());
1529          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1530          break;
1531
1532        default:
1533          LOG(FATAL) << "Unexpected type conversion from " << input_type
1534                     << " to " << result_type;
1535      };
1536      break;
1537
1538    case Primitive::kPrimDouble:
1539      switch (input_type) {
1540        case Primitive::kPrimByte:
1541        case Primitive::kPrimShort:
1542        case Primitive::kPrimInt:
1543        case Primitive::kPrimChar:
1544          // Processing a Dex `int-to-double' instruction.
1545          locations->SetInAt(0, Location::RequiresRegister());
1546          locations->SetOut(Location::RequiresFpuRegister());
1547          break;
1548
1549        case Primitive::kPrimLong:
1550          // Processing a Dex `long-to-double' instruction.
1551          locations->SetInAt(0, Location::RequiresRegister());
1552          locations->SetOut(Location::RequiresFpuRegister());
1553          locations->AddTemp(Location::RequiresRegister());
1554          locations->AddTemp(Location::RequiresRegister());
1555          locations->AddTemp(Location::RequiresFpuRegister());
1556          break;
1557
1558        case Primitive::kPrimFloat:
1559          // Processing a Dex `float-to-double' instruction.
1560          locations->SetInAt(0, Location::RequiresFpuRegister());
1561          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1562          break;
1563
1564        default:
1565          LOG(FATAL) << "Unexpected type conversion from " << input_type
1566                     << " to " << result_type;
1567      };
1568      break;
1569
1570    default:
1571      LOG(FATAL) << "Unexpected type conversion from " << input_type
1572                 << " to " << result_type;
1573  }
1574}
1575
// Emits code for an HTypeConversion, moving/extending/converting the input
// (`in`) to the result type in `out`.
// - Integral narrowing (to byte/short/char) uses signed/unsigned bitfield
//   extracts (sbfx/ubfx).
// - int <-> float/double uses VFP instructions, staging through FP temps
//   allocated by the locations builder where needed.
// - float/double -> long is delegated to runtime entry points (pF2l/pD2l).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Sign-extend the low 8 bits into the full register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Sign-extend the low 16 bits into the full register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept; the input may live
          // in a register pair, on the stack, or be a constant.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FP scratch register, then move the integer
          // result to the core output register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // Same staging scheme as float-to-int, with a D-register temp.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Zero-extend the low 16 bits (char is unsigned).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.
          // The computed value is high * 2^32 + (unsigned) low.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same high * 2^32 + (unsigned) low scheme as long-to-float,
          // except the result can be accumulated directly in `out_d`.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1835
1836void LocationsBuilderARM::VisitAdd(HAdd* add) {
1837  LocationSummary* locations =
1838      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1839  switch (add->GetResultType()) {
1840    case Primitive::kPrimInt: {
1841      locations->SetInAt(0, Location::RequiresRegister());
1842      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1843      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1844      break;
1845    }
1846
1847    case Primitive::kPrimLong: {
1848      locations->SetInAt(0, Location::RequiresRegister());
1849      locations->SetInAt(1, Location::RequiresRegister());
1850      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1851      break;
1852    }
1853
1854    case Primitive::kPrimFloat:
1855    case Primitive::kPrimDouble: {
1856      locations->SetInAt(0, Location::RequiresFpuRegister());
1857      locations->SetInAt(1, Location::RequiresFpuRegister());
1858      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1859      break;
1860    }
1861
1862    default:
1863      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1864  }
1865}
1866
1867void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1868  LocationSummary* locations = add->GetLocations();
1869  Location out = locations->Out();
1870  Location first = locations->InAt(0);
1871  Location second = locations->InAt(1);
1872  switch (add->GetResultType()) {
1873    case Primitive::kPrimInt:
1874      if (second.IsRegister()) {
1875        __ add(out.AsRegister<Register>(),
1876               first.AsRegister<Register>(),
1877               ShifterOperand(second.AsRegister<Register>()));
1878      } else {
1879        __ AddConstant(out.AsRegister<Register>(),
1880                       first.AsRegister<Register>(),
1881                       second.GetConstant()->AsIntConstant()->GetValue());
1882      }
1883      break;
1884
1885    case Primitive::kPrimLong: {
1886      DCHECK(second.IsRegisterPair());
1887      __ adds(out.AsRegisterPairLow<Register>(),
1888              first.AsRegisterPairLow<Register>(),
1889              ShifterOperand(second.AsRegisterPairLow<Register>()));
1890      __ adc(out.AsRegisterPairHigh<Register>(),
1891             first.AsRegisterPairHigh<Register>(),
1892             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1893      break;
1894    }
1895
1896    case Primitive::kPrimFloat:
1897      __ vadds(out.AsFpuRegister<SRegister>(),
1898               first.AsFpuRegister<SRegister>(),
1899               second.AsFpuRegister<SRegister>());
1900      break;
1901
1902    case Primitive::kPrimDouble:
1903      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1904               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1905               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1906      break;
1907
1908    default:
1909      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1910  }
1911}
1912
1913void LocationsBuilderARM::VisitSub(HSub* sub) {
1914  LocationSummary* locations =
1915      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1916  switch (sub->GetResultType()) {
1917    case Primitive::kPrimInt: {
1918      locations->SetInAt(0, Location::RequiresRegister());
1919      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1920      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1921      break;
1922    }
1923
1924    case Primitive::kPrimLong: {
1925      locations->SetInAt(0, Location::RequiresRegister());
1926      locations->SetInAt(1, Location::RequiresRegister());
1927      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1928      break;
1929    }
1930    case Primitive::kPrimFloat:
1931    case Primitive::kPrimDouble: {
1932      locations->SetInAt(0, Location::RequiresFpuRegister());
1933      locations->SetInAt(1, Location::RequiresFpuRegister());
1934      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1935      break;
1936    }
1937    default:
1938      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1939  }
1940}
1941
1942void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1943  LocationSummary* locations = sub->GetLocations();
1944  Location out = locations->Out();
1945  Location first = locations->InAt(0);
1946  Location second = locations->InAt(1);
1947  switch (sub->GetResultType()) {
1948    case Primitive::kPrimInt: {
1949      if (second.IsRegister()) {
1950        __ sub(out.AsRegister<Register>(),
1951               first.AsRegister<Register>(),
1952               ShifterOperand(second.AsRegister<Register>()));
1953      } else {
1954        __ AddConstant(out.AsRegister<Register>(),
1955                       first.AsRegister<Register>(),
1956                       -second.GetConstant()->AsIntConstant()->GetValue());
1957      }
1958      break;
1959    }
1960
1961    case Primitive::kPrimLong: {
1962      DCHECK(second.IsRegisterPair());
1963      __ subs(out.AsRegisterPairLow<Register>(),
1964              first.AsRegisterPairLow<Register>(),
1965              ShifterOperand(second.AsRegisterPairLow<Register>()));
1966      __ sbc(out.AsRegisterPairHigh<Register>(),
1967             first.AsRegisterPairHigh<Register>(),
1968             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1969      break;
1970    }
1971
1972    case Primitive::kPrimFloat: {
1973      __ vsubs(out.AsFpuRegister<SRegister>(),
1974               first.AsFpuRegister<SRegister>(),
1975               second.AsFpuRegister<SRegister>());
1976      break;
1977    }
1978
1979    case Primitive::kPrimDouble: {
1980      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1981               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1982               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1983      break;
1984    }
1985
1986
1987    default:
1988      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1989  }
1990}
1991
1992void LocationsBuilderARM::VisitMul(HMul* mul) {
1993  LocationSummary* locations =
1994      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1995  switch (mul->GetResultType()) {
1996    case Primitive::kPrimInt:
1997    case Primitive::kPrimLong:  {
1998      locations->SetInAt(0, Location::RequiresRegister());
1999      locations->SetInAt(1, Location::RequiresRegister());
2000      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2001      break;
2002    }
2003
2004    case Primitive::kPrimFloat:
2005    case Primitive::kPrimDouble: {
2006      locations->SetInAt(0, Location::RequiresFpuRegister());
2007      locations->SetInAt(1, Location::RequiresFpuRegister());
2008      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2009      break;
2010    }
2011
2012    default:
2013      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2014  }
2015}
2016
// Emits the multiplication for an HMul.
// - int: a single `mul`.
// - long: the low 64 bits of the 64x64 product, via mul/mla/umull/add,
//   using IP as a scratch register; the output high register must not
//   alias either input low register (DCHECKed below).
// - float/double: a single VFP multiply.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // (umull also leaves the high 32 bits of the product in IP.)
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2078
2079void LocationsBuilderARM::VisitDiv(HDiv* div) {
2080  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2081      ? LocationSummary::kCall
2082      : LocationSummary::kNoCall;
2083  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2084
2085  switch (div->GetResultType()) {
2086    case Primitive::kPrimInt: {
2087      locations->SetInAt(0, Location::RequiresRegister());
2088      locations->SetInAt(1, Location::RequiresRegister());
2089      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2090      break;
2091    }
2092    case Primitive::kPrimLong: {
2093      InvokeRuntimeCallingConvention calling_convention;
2094      locations->SetInAt(0, Location::RegisterPairLocation(
2095          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2096      locations->SetInAt(1, Location::RegisterPairLocation(
2097          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2098      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2099      break;
2100    }
2101    case Primitive::kPrimFloat:
2102    case Primitive::kPrimDouble: {
2103      locations->SetInAt(0, Location::RequiresFpuRegister());
2104      locations->SetInAt(1, Location::RequiresFpuRegister());
2105      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2106      break;
2107    }
2108
2109    default:
2110      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2111  }
2112}
2113
2114void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2115  LocationSummary* locations = div->GetLocations();
2116  Location out = locations->Out();
2117  Location first = locations->InAt(0);
2118  Location second = locations->InAt(1);
2119
2120  switch (div->GetResultType()) {
2121    case Primitive::kPrimInt: {
2122      __ sdiv(out.AsRegister<Register>(),
2123              first.AsRegister<Register>(),
2124              second.AsRegister<Register>());
2125      break;
2126    }
2127
2128    case Primitive::kPrimLong: {
2129      InvokeRuntimeCallingConvention calling_convention;
2130      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2131      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2132      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2133      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2134      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2135      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2136
2137      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2138      break;
2139    }
2140
2141    case Primitive::kPrimFloat: {
2142      __ vdivs(out.AsFpuRegister<SRegister>(),
2143               first.AsFpuRegister<SRegister>(),
2144               second.AsFpuRegister<SRegister>());
2145      break;
2146    }
2147
2148    case Primitive::kPrimDouble: {
2149      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2150               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2151               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2152      break;
2153    }
2154
2155    default:
2156      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2157  }
2158}
2159
2160void LocationsBuilderARM::VisitRem(HRem* rem) {
2161  Primitive::Type type = rem->GetResultType();
2162  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2163      ? LocationSummary::kNoCall
2164      : LocationSummary::kCall;
2165  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2166
2167  switch (type) {
2168    case Primitive::kPrimInt: {
2169      locations->SetInAt(0, Location::RequiresRegister());
2170      locations->SetInAt(1, Location::RequiresRegister());
2171      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2172      locations->AddTemp(Location::RequiresRegister());
2173      break;
2174    }
2175    case Primitive::kPrimLong: {
2176      InvokeRuntimeCallingConvention calling_convention;
2177      locations->SetInAt(0, Location::RegisterPairLocation(
2178          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2179      locations->SetInAt(1, Location::RegisterPairLocation(
2180          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2181      // The runtime helper puts the output in R2,R3.
2182      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2183      break;
2184    }
2185    case Primitive::kPrimFloat: {
2186      InvokeRuntimeCallingConvention calling_convention;
2187      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2188      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2189      locations->SetOut(Location::FpuRegisterLocation(S0));
2190      break;
2191    }
2192
2193    case Primitive::kPrimDouble: {
2194      InvokeRuntimeCallingConvention calling_convention;
2195      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2196          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2197      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2198          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2199      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2200      break;
2201    }
2202
2203    default:
2204      LOG(FATAL) << "Unexpected rem type " << type;
2205  }
2206}
2207
2208void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2209  LocationSummary* locations = rem->GetLocations();
2210  Location out = locations->Out();
2211  Location first = locations->InAt(0);
2212  Location second = locations->InAt(1);
2213
2214  Primitive::Type type = rem->GetResultType();
2215  switch (type) {
2216    case Primitive::kPrimInt: {
2217      Register reg1 = first.AsRegister<Register>();
2218      Register reg2 = second.AsRegister<Register>();
2219      Register temp = locations->GetTemp(0).AsRegister<Register>();
2220
2221      // temp = reg1 / reg2  (integer division)
2222      // temp = temp * reg2
2223      // dest = reg1 - temp
2224      __ sdiv(temp, reg1, reg2);
2225      __ mul(temp, temp, reg2);
2226      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2227      break;
2228    }
2229
2230    case Primitive::kPrimLong: {
2231      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2232      break;
2233    }
2234
2235    case Primitive::kPrimFloat: {
2236      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2237      break;
2238    }
2239
2240    case Primitive::kPrimDouble: {
2241      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2242      break;
2243    }
2244
2245    default:
2246      LOG(FATAL) << "Unexpected rem type " << type;
2247  }
2248}
2249
2250void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2251  LocationSummary* locations =
2252      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2253  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2254  if (instruction->HasUses()) {
2255    locations->SetOut(Location::SameAsFirstInput());
2256  }
2257}
2258
2259void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2260  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2261  codegen_->AddSlowPath(slow_path);
2262
2263  LocationSummary* locations = instruction->GetLocations();
2264  Location value = locations->InAt(0);
2265
2266  switch (instruction->GetType()) {
2267    case Primitive::kPrimInt: {
2268      if (value.IsRegister()) {
2269        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2270        __ b(slow_path->GetEntryLabel(), EQ);
2271      } else {
2272        DCHECK(value.IsConstant()) << value;
2273        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2274          __ b(slow_path->GetEntryLabel());
2275        }
2276      }
2277      break;
2278    }
2279    case Primitive::kPrimLong: {
2280      if (value.IsRegisterPair()) {
2281        __ orrs(IP,
2282                value.AsRegisterPairLow<Register>(),
2283                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2284        __ b(slow_path->GetEntryLabel(), EQ);
2285      } else {
2286        DCHECK(value.IsConstant()) << value;
2287        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2288          __ b(slow_path->GetEntryLabel());
2289        }
2290      }
2291      break;
2292    default:
2293      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2294    }
2295  }
2296}
2297
2298void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2299  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2300
2301  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2302      ? LocationSummary::kCall
2303      : LocationSummary::kNoCall;
2304  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2305
2306  switch (op->GetResultType()) {
2307    case Primitive::kPrimInt: {
2308      locations->SetInAt(0, Location::RequiresRegister());
2309      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2310      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2311      break;
2312    }
2313    case Primitive::kPrimLong: {
2314      locations->SetInAt(0, Location::RequiresRegister());
2315      locations->SetInAt(1, Location::RequiresRegister());
2316      locations->SetOut(Location::RequiresRegister());
2317      break;
2318    }
2319    default:
2320      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2321  }
2322}
2323
// Emits code for Shl, Shr and UShr.  ARM does not mask the shift count in
// hardware, so the count is masked explicitly here (to kMaxIntShiftValue
// for int, to 63 for long).  NOTE: the register holding a non-constant
// shift count is clobbered by the masking `and_`, and the 64-bit paths
// also write into the input pair registers.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register o_h = out.AsRegisterPairHigh<Register>();
      Register o_l = out.AsRegisterPairLow<Register>();

      Register high = first.AsRegisterPairHigh<Register>();
      Register low = first.AsRegisterPairLow<Register>();

      Register second_reg = second.AsRegister<Register>();

      if (op->IsShl()) {
        // Shift the high part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsl(high, high, second_reg);
        // Shift the low part and `or` what overflowed on the high part
        __ rsb(IP, second_reg, ShifterOperand(32));
        __ Lsr(IP, low, IP);
        __ orr(o_h, high, ShifterOperand(IP));
        // If the shift is > 32 bits, override the high part
        // (subs leaves count-32 in IP; PL means the count was >= 32).
        __ subs(IP, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsl(o_h, low, IP, false, PL);
        // Shift the low part
        __ Lsl(o_l, low, second_reg);
      } else if (op->IsShr()) {
        // Shift the low part
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(low, low, second_reg);
        // Shift the high part and `or` what underflowed on the low part
        __ rsb(IP, second_reg, ShifterOperand(32));
        __ Lsl(IP, high, IP);
        __ orr(o_l, low, ShifterOperand(IP));
        // If the shift is > 32 bits, override the low part
        // (same PL-predicated correction as the Shl case above).
        __ subs(IP, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Asr(o_l, high, IP, false, PL);
        // Shift the high part
        __ Asr(o_h, high, second_reg);
      } else {
        // same as Shr except we use `Lsr`s and not `Asr`s
        __ and_(second_reg, second_reg, ShifterOperand(63));
        __ Lsr(low, low, second_reg);
        __ rsb(IP, second_reg, ShifterOperand(32));
        __ Lsl(IP, high, IP);
        __ orr(o_l, low, ShifterOperand(IP));
        __ subs(IP, second_reg, ShifterOperand(32));
        __ it(PL);
        __ Lsr(o_l, high, IP, false, PL);
        __ Lsr(o_h, high, second_reg);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2418
// Location setup for int/long left shift; shared with Shr/UShr via HandleShift.
void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2422
// Code generation for left shift; shared with Shr/UShr via HandleShift.
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2426
// Location setup for arithmetic right shift; shared with Shl/UShr via HandleShift.
void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2430
// Code generation for arithmetic right shift; shared with Shl/UShr via HandleShift.
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2434
// Location setup for logical right shift; shared with Shl/Shr via HandleShift.
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2438
// Code generation for logical right shift; shared with Shl/Shr via HandleShift.
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2442
2443void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2444  LocationSummary* locations =
2445      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2446  InvokeRuntimeCallingConvention calling_convention;
2447  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2448  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2449  locations->SetOut(Location::RegisterLocation(R0));
2450}
2451
// Emits the runtime call that allocates a new object: the current method
// goes in the second argument register, the type index in the first.
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2460
2461void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2462  LocationSummary* locations =
2463      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2464  InvokeRuntimeCallingConvention calling_convention;
2465  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2466  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2467  locations->SetOut(Location::RegisterLocation(R0));
2468  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2469}
2470
// Emits the runtime call that allocates a new array. The length is already
// in the second argument register (set up by the locations builder); the
// current method goes in the third and the type index in the first.
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2479
2480void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2481  LocationSummary* locations =
2482      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2483  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2484  if (location.IsStackSlot()) {
2485    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2486  } else if (location.IsDoubleStackSlot()) {
2487    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2488  }
2489  locations->SetOut(location);
2490}
2491
// No code is emitted: the locations builder already placed the output at
// the parameter's calling-convention location.
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2496
// Bitwise-not reads its input once before writing any output register,
// so the output may share registers with the input (no overlap needed).
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2503
2504void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2505  LocationSummary* locations = not_->GetLocations();
2506  Location out = locations->Out();
2507  Location in = locations->InAt(0);
2508  switch (not_->GetResultType()) {
2509    case Primitive::kPrimInt:
2510      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2511      break;
2512
2513    case Primitive::kPrimLong:
2514      __ mvn(out.AsRegisterPairLow<Register>(),
2515             ShifterOperand(in.AsRegisterPairLow<Register>()));
2516      __ mvn(out.AsRegisterPairHigh<Register>(),
2517             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2518      break;
2519
2520    default:
2521      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2522  }
2523}
2524
2525void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2526  LocationSummary* locations =
2527      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2528  switch (compare->InputAt(0)->GetType()) {
2529    case Primitive::kPrimLong: {
2530      locations->SetInAt(0, Location::RequiresRegister());
2531      locations->SetInAt(1, Location::RequiresRegister());
2532      // Output overlaps because it is written before doing the low comparison.
2533      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2534      break;
2535    }
2536    case Primitive::kPrimFloat:
2537    case Primitive::kPrimDouble: {
2538      locations->SetInAt(0, Location::RequiresFpuRegister());
2539      locations->SetInAt(1, Location::RequiresFpuRegister());
2540      locations->SetOut(Location::RequiresRegister());
2541      break;
2542    }
2543    default:
2544      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2545  }
2546}
2547
// Materializes -1, 0 or 1 in `out` according to the ordering of the two
// inputs (long, float or double). The common tail after the switch turns
// the condition flags of the deciding comparison into the result value.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first; only when they are equal does the
      // unsigned comparison of the low words decide the result.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // Unordered (NaN) operands take the bias branch.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Here `out` is already 0 and the flags hold the deciding comparison.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2596
2597void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2598  LocationSummary* locations =
2599      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2600  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2601    locations->SetInAt(i, Location::Any());
2602  }
2603  locations->SetOut(Location::Any());
2604}
2605
// Phis never reach code generation; hitting this is a compiler bug.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2610
2611void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2612  // TODO (ported from quick): revisit Arm barrier kinds
2613  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2614  switch (kind) {
2615    case MemBarrierKind::kAnyStore:
2616    case MemBarrierKind::kLoadAny:
2617    case MemBarrierKind::kAnyAny: {
2618      flavour = DmbOptions::ISH;
2619      break;
2620    }
2621    case MemBarrierKind::kStoreStore: {
2622      flavour = DmbOptions::ISHST;
2623      break;
2624    }
2625    default:
2626      LOG(FATAL) << "Unexpected memory barrier " << kind;
2627  }
2628  __ dmb(flavour);
2629}
2630
// Atomically loads the 64-bit value at `addr` + `offset` into
// out_lo/out_hi using LDREXD. `out_lo` doubles as a scratch register for
// the address computation since the load overwrites it anyway.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2642
// Atomically stores value_lo/value_hi at `addr` + `offset` with an
// LDREXD/STREXD retry loop. `temp1`/`temp2` receive the discarded loaded
// value; `temp1` is then reused for the STREXD status flag.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // STREXD writes 0 on success; loop until the exclusive store succeeds.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2665
2666void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2667  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2668
2669  LocationSummary* locations =
2670      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2671  locations->SetInAt(0, Location::RequiresRegister());
2672  locations->SetInAt(1, Location::RequiresRegister());
2673
2674
2675  Primitive::Type field_type = field_info.GetFieldType();
2676  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2677  bool generate_volatile = field_info.IsVolatile()
2678      && is_wide
2679      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2680  // Temporary registers for the write barrier.
2681  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2682  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2683    locations->AddTemp(Location::RequiresRegister());
2684    locations->AddTemp(Location::RequiresRegister());
2685  } else if (generate_volatile) {
2686    // Arm encoding have some additional constraints for ldrexd/strexd:
2687    // - registers need to be consecutive
2688    // - the first register should be even but not R14.
2689    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2690    // enable Arm encoding.
2691    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2692
2693    locations->AddTemp(Location::RequiresRegister());
2694    locations->AddTemp(Location::RequiresRegister());
2695    if (field_type == Primitive::kPrimDouble) {
2696      // For doubles we need two more registers to copy the value.
2697      locations->AddTemp(Location::RegisterLocation(R2));
2698      locations->AddTemp(Location::RegisterLocation(R3));
2699    }
2700  }
2701}
2702
// Emits an instance/static field store. Volatile stores are bracketed by
// memory barriers; wide volatile stores without single-copy-atomic
// ldrd/strd use the ldrexd/strexd loop. Reference stores mark the GC card.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Barrier before the volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic strd available: use the ldrexd/strexd loop.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the double into two core registers so the ldrexd/strexd
        // loop can store it.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Reference store: mark the card so the GC sees the new reference.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // Barrier after the volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2800
2801void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2802  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2803  LocationSummary* locations =
2804      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2805  locations->SetInAt(0, Location::RequiresRegister());
2806
2807  bool volatile_for_double = field_info.IsVolatile()
2808      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2809      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2810  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
2811  locations->SetOut(Location::RequiresRegister(),
2812                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
2813  if (volatile_for_double) {
2814    // Arm encoding have some additional constraints for ldrexd/strexd:
2815    // - registers need to be consecutive
2816    // - the first register should be even but not R14.
2817    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2818    // enable Arm encoding.
2819    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2820    locations->AddTemp(Location::RequiresRegister());
2821    locations->AddTemp(Location::RequiresRegister());
2822  }
2823}
2824
// Emits an instance/static field load. Volatile doubles without
// single-copy-atomic ldrd go through the ldrexd path and two core temps;
// volatile loads are followed by a load-any barrier.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic ldrd available: use ldrexd.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into two core temps, then move into the D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Barrier after the volatile load.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2909
// Instance field stores share location setup with static stores.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2913
// Instance field stores share code generation with static stores.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2917
// Instance field loads share location setup with static loads.
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2921
// Instance field loads share code generation with static loads.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2925
// Static field loads share location setup with instance loads.
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2929
// Static field loads share code generation with instance loads.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2933
// Static field stores share location setup with instance stores.
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2937
// Static field stores share code generation with instance stores.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2941
// The checked object only needs to be in a register; a null check has no
// value of its own, so users (if any) see the input unchanged.
void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2950
2951void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
2952  if (codegen_->CanMoveNullCheckToUser(instruction)) {
2953    return;
2954  }
2955  Location obj = instruction->GetLocations()->InAt(0);
2956
2957  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
2958  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2959}
2960
2961void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
2962  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2963  codegen_->AddSlowPath(slow_path);
2964
2965  LocationSummary* locations = instruction->GetLocations();
2966  Location obj = locations->InAt(0);
2967
2968  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2969  __ b(slow_path->GetEntryLabel(), EQ);
2970}
2971
2972void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2973  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2974    GenerateImplicitNullCheck(instruction);
2975  } else {
2976    GenerateExplicitNullCheck(instruction);
2977  }
2978}
2979
// Array loads need the array in a register; the index may be a constant
// (folded into the load offset) or a register.
void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2987
// Emits an array element load. For a constant index the element offset is
// folded into the load; for a register index the scaled address is first
// materialized in IP. The element size determines the load kind and scale.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // IP = obj + index (byte elements need no scaling).
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // IP = obj + (index << 1) for 2-byte elements.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded like 32-bit ints (compressed heap references).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3114
3115void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3116  Primitive::Type value_type = instruction->GetComponentType();
3117
3118  bool needs_write_barrier =
3119      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3120  bool needs_runtime_call = instruction->NeedsTypeCheck();
3121
3122  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3123      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3124  if (needs_runtime_call) {
3125    InvokeRuntimeCallingConvention calling_convention;
3126    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3127    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3128    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3129  } else {
3130    locations->SetInAt(0, Location::RequiresRegister());
3131    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3132    locations->SetInAt(2, Location::RequiresRegister());
3133
3134    if (needs_write_barrier) {
3135      // Temporary registers for the write barrier.
3136      locations->AddTemp(Location::RequiresRegister());
3137      locations->AddTemp(Location::RequiresRegister());
3138    }
3139  }
3140}
3141
// Generates code for an array store.  For each component type, a constant
// index is folded into the store's immediate offset, while a register index
// is first scaled and added to the array base in IP.  Int/object stores
// either go inline (plus a write barrier for non-null references) or call the
// pAputObject entrypoint when a runtime type check is needed.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record here (not in the shared epilogue below) so the fault address
        // is attributed to the store just emitted.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // The runtime entrypoint performs the type check, store and barrier.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3265
3266void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3267  LocationSummary* locations =
3268      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3269  locations->SetInAt(0, Location::RequiresRegister());
3270  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3271}
3272
3273void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3274  LocationSummary* locations = instruction->GetLocations();
3275  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3276  Register obj = locations->InAt(0).AsRegister<Register>();
3277  Register out = locations->Out().AsRegister<Register>();
3278  __ LoadFromOffset(kLoadWord, out, obj, offset);
3279  codegen_->MaybeRecordImplicitNullCheck(instruction);
3280}
3281
3282void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3283  LocationSummary* locations =
3284      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3285  locations->SetInAt(0, Location::RequiresRegister());
3286  locations->SetInAt(1, Location::RequiresRegister());
3287  if (instruction->HasUses()) {
3288    locations->SetOut(Location::SameAsFirstInput());
3289  }
3290}
3291
3292void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3293  LocationSummary* locations = instruction->GetLocations();
3294  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3295      instruction, locations->InAt(0), locations->InAt(1));
3296  codegen_->AddSlowPath(slow_path);
3297
3298  Register index = locations->InAt(0).AsRegister<Register>();
3299  Register length = locations->InAt(1).AsRegister<Register>();
3300
3301  __ cmp(index, ShifterOperand(length));
3302  __ b(slow_path->GetEntryLabel(), CS);
3303}
3304
// Marks the GC card for `object` after `value` was stored into it, skipping
// the mark entirely when `value` is null.  `temp` and `card` are scratch
// registers; `card` ends up holding the card-table base loaded from the
// current thread.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // Index of the card covering `object`.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // strb writes the least-significant byte of `card` (the table base) into
  // the card entry, dirtying it without needing an extra immediate register.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3313
// HTemporary carries no location summary of its own; its storage is managed
// directly by the code generator.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3317
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3322
// Parallel moves are introduced after register allocation, so the locations
// builder should never encounter one.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3327
// Delegates the whole move graph to the parallel move resolver, which orders
// the moves and breaks cycles (see EmitMove/EmitSwap below).
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3331
// A suspend check only ever calls the runtime from its slow path and needs
// no inputs, outputs or temps.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3335
3336void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3337  HBasicBlock* block = instruction->GetBlock();
3338  if (block->GetLoopInformation() != nullptr) {
3339    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3340    // The back edge will generate the suspend check.
3341    return;
3342  }
3343  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3344    // The goto will generate the suspend check.
3345    return;
3346  }
3347  GenerateSuspendCheck(instruction, nullptr);
3348}
3349
// Emits the actual suspend check: tests the thread's flags halfword and
// diverts to the suspend slow path when any flag is set.  With a `successor`
// block (back-edge case) the flags-clear path branches straight to it;
// otherwise control falls back in line at the slow path's return label.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3368
// The resolver emits through the code generator's assembler (used by the
// `__` macro in the methods below).
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3372
// Emits one move of the parallel-move sequence, dispatching on the source
// kind (core register, stack slot, FPU register, their 64-bit pair variants,
// or a constant).  IP and DTMP serve as scratch registers for memory-to-
// memory copies and for materializing constants into stack slots.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack word copy staged through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsDoubleStackSlot()) {
      // 64-bit stack-to-stack copy staged through the FPU scratch register.
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      // ldrd requires an even/odd consecutive pair; checked by the DCHECK.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant sources: materialize into the destination register(s), or
    // through IP when the destination is a stack slot.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the raw bit pattern word by word.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3487
// Swaps the contents of `reg` with the stack slot at SP + `mem`, using IP as
// the scratch register.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3493
// Swaps two stack slots using IP plus one additional scratch register.  If
// the scratch scope had to spill a register onto the stack, both slot
// offsets shift by one word to account for the push.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3504
// Emits a swap of two locations, used by the resolver to break cycles in the
// move graph.  IP is the core scratch register and DTMP the FPU scratch
// register; register pairs are shuttled through DTMP via vmovdrr/vmovrrd so
// no extra core register is needed.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // IP is the scratch for the three-move swap, so neither operand may be IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Park the source pair in DTMP while the destination pair moves over.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Register pair <-> double stack slot, in either direction.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // FPU pair <-> double stack slot, in either direction.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // Single FPU register <-> single stack slot, in either direction.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // Swap the two words of the double slots independently.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3577
// Saves a scratch register on the stack so ScratchRegisterScope can borrow it.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3581
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3585
3586void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3587  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3588      ? LocationSummary::kCallOnSlowPath
3589      : LocationSummary::kNoCall;
3590  LocationSummary* locations =
3591      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3592  locations->SetOut(Location::RequiresRegister());
3593}
3594
// Loads a class reference.  The referrer's own class comes straight off the
// current ArtMethod; any other class is fetched from the method's dex cache,
// with a slow path to resolve it if the cache entry is null and optionally to
// run the class initializer.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out <- method->dex_cache_resolved_types_[type_index].
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3621
3622void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3623  LocationSummary* locations =
3624      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3625  locations->SetInAt(0, Location::RequiresRegister());
3626  if (check->HasUses()) {
3627    locations->SetOut(Location::SameAsFirstInput());
3628  }
3629}
3630
3631void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3632  // We assume the class is not null.
3633  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3634      check->GetLoadClass(), check, check->GetDexPc(), true);
3635  codegen_->AddSlowPath(slow_path);
3636  GenerateClassInitializationCheck(slow_path,
3637                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3638}
3639
// Checks whether the class in `class_reg` is initialized, branching to
// `slow_path` when its status is below kStatusInitialized, then emits a
// memory barrier before continuing.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3650
3651void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3652  LocationSummary* locations =
3653      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3654  locations->SetOut(Location::RequiresRegister());
3655}
3656
// Loads a string reference via the chain
// current method -> declaring class -> dex cache strings -> entry, taking
// the slow path to resolve the string when the cache entry is null.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3670
3671void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3672  LocationSummary* locations =
3673      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3674  locations->SetOut(Location::RequiresRegister());
3675}
3676
// Moves the pending exception from the current thread into `out` and then
// clears the thread-local exception slot by storing null back to it.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3684
3685void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3686  LocationSummary* locations =
3687      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3688  InvokeRuntimeCallingConvention calling_convention;
3689  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3690}
3691
// Delegates the throw entirely to the pDeliverException entrypoint.
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3696
3697void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3698  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3699      ? LocationSummary::kNoCall
3700      : LocationSummary::kCallOnSlowPath;
3701  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3702  locations->SetInAt(0, Location::RequiresRegister());
3703  locations->SetInAt(1, Location::RequiresRegister());
3704  // The out register is used as a temporary, so it overlaps with the inputs.
3705  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3706}
3707
// Generates an instanceof test: null yields 0; otherwise the object's class
// is compared with `cls`.  Equality yields 1 directly; inequality yields 0
// for final classes or defers to the type-check slow path otherwise.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3746
3747void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3748  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3749      instruction, LocationSummary::kCallOnSlowPath);
3750  locations->SetInAt(0, Location::RequiresRegister());
3751  locations->SetInAt(1, Location::RequiresRegister());
3752  locations->AddTemp(Location::RequiresRegister());
3753}
3754
// Generates a checkcast: a null object skips the test entirely (null casts
// to anything, so it branches straight to the exit label), otherwise the
// object's class is compared with `cls` and a mismatch takes the type-check
// slow path.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3775
3776void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3777  LocationSummary* locations =
3778      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3779  InvokeRuntimeCallingConvention calling_convention;
3780  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3781}
3782
// Dispatches to pLockObject or pUnlockObject depending on whether this is a
// monitor-enter or monitor-exit.
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}
3789
// And/Or/Xor share identical register constraints, set up by
// HandleBitwiseOperation below.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3793
3794void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3795  LocationSummary* locations =
3796      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3797  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3798         || instruction->GetResultType() == Primitive::kPrimLong);
3799  locations->SetInAt(0, Location::RequiresRegister());
3800  locations->SetInAt(1, Location::RequiresRegister());
3801  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3802}
3803
3804void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3805  HandleBitwiseOperation(instruction);
3806}
3807
3808void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3809  HandleBitwiseOperation(instruction);
3810}
3811
3812void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3813  HandleBitwiseOperation(instruction);
3814}
3815
3816void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3817  LocationSummary* locations = instruction->GetLocations();
3818
3819  if (instruction->GetResultType() == Primitive::kPrimInt) {
3820    Register first = locations->InAt(0).AsRegister<Register>();
3821    Register second = locations->InAt(1).AsRegister<Register>();
3822    Register out = locations->Out().AsRegister<Register>();
3823    if (instruction->IsAnd()) {
3824      __ and_(out, first, ShifterOperand(second));
3825    } else if (instruction->IsOr()) {
3826      __ orr(out, first, ShifterOperand(second));
3827    } else {
3828      DCHECK(instruction->IsXor());
3829      __ eor(out, first, ShifterOperand(second));
3830    }
3831  } else {
3832    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3833    Location first = locations->InAt(0);
3834    Location second = locations->InAt(1);
3835    Location out = locations->Out();
3836    if (instruction->IsAnd()) {
3837      __ and_(out.AsRegisterPairLow<Register>(),
3838              first.AsRegisterPairLow<Register>(),
3839              ShifterOperand(second.AsRegisterPairLow<Register>()));
3840      __ and_(out.AsRegisterPairHigh<Register>(),
3841              first.AsRegisterPairHigh<Register>(),
3842              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3843    } else if (instruction->IsOr()) {
3844      __ orr(out.AsRegisterPairLow<Register>(),
3845             first.AsRegisterPairLow<Register>(),
3846             ShifterOperand(second.AsRegisterPairLow<Register>()));
3847      __ orr(out.AsRegisterPairHigh<Register>(),
3848             first.AsRegisterPairHigh<Register>(),
3849             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3850    } else {
3851      DCHECK(instruction->IsXor());
3852      __ eor(out.AsRegisterPairLow<Register>(),
3853             first.AsRegisterPairLow<Register>(),
3854             ShifterOperand(second.AsRegisterPairLow<Register>()));
3855      __ eor(out.AsRegisterPairHigh<Register>(),
3856             first.AsRegisterPairHigh<Register>(),
3857             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3858    }
3859  }
3860}
3861
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  // Emits the call sequence for a static or direct invoke. `temp` must be
  // the dedicated ART method register required by the calling convention.
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;  (load the current ArtMethod* into `temp`)
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]  (the callee ArtMethod*, resolved via the dex cache)
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // Call the entry point now held in LR (blx also sets the new LR).
    __ blx(LR);
  } else {
    // Recursive call: branch-with-link directly to this method's own frame
    // entry label, skipping the dex cache lookup entirely.
    __ bl(GetFrameEntryLabel());
  }

  // Record the PC so the runtime can map it back to the invoke's dex pc.
  RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!IsLeafMethod());
}
3894
void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // HBoundType nodes must not survive to code generation, so reaching this
  // visitor indicates a compiler pass ordering bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3900
void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // HBoundType nodes must not survive to code generation, so reaching this
  // visitor indicates a compiler pass ordering bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3906
3907}  // namespace arm
3908}  // namespace art
3909