// code_generator_arm.cc revision f3b4aebd0f5ce6c82bfd6284919a5c5e91955124
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Offset from SP at which the current method is spilled (GenerateFrameEntry
// stores R0 there after setting up the frame).
static constexpr int kCurrentMethodStackOffset = 0;

// Registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-save registers. PC is listed to mimic the Quick frame layout;
// GenerateFrameEntry pushes LR in its place on entry.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
61
// Calling convention used when calling into the runtime (quick entry points):
// arguments go in R0-R3 / S0-S3 as declared above.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
73
// In the slow path classes below, `__` emits through the assembler of the
// `codegen` argument passed to EmitNativeCode.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Int32 offset of a quick entry point within the Thread object (loaded via TR
// in CodeGeneratorARM::InvokeRuntime).
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
76
77class NullCheckSlowPathARM : public SlowPathCodeARM {
78 public:
79  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
80
81  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
82    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
83    __ Bind(GetEntryLabel());
84    arm_codegen->InvokeRuntime(
85        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
86  }
87
88 private:
89  HNullCheck* const instruction_;
90  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
91};
92
// Slow path for HDivZeroCheck: calls the pThrowDivZero runtime entry point.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Note: no live-register save/restore around this call (compare
    // SuspendCheckSlowPathARM, which does save them).
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
108
// Slow path for HSuspendCheck: saves live registers, calls pTestSuspend,
// restores them, then branches either back (return label) or to `successor_`.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Live register values must survive the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ b(GetReturnLabel());
    } else {
      // Resume at the designated successor block.
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    // The return label is only meaningful when there is no explicit successor.
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
143
// Slow path for HBoundsCheck: moves index and length into the runtime
// calling-convention registers and calls pThrowArrayBounds.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Where the offending index currently lives.
  const Location index_location_;
  // Where the array length currently lives.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
175
// Slow path for HLoadClass / HClinitCheck: resolves (and optionally
// initializes) a class via the runtime and moves the result into the
// instruction's output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: type index in reg 0, current method in reg 1.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Pick the entry point: initialize the class, or only resolve the type.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output register must not be clobbered by the restore below.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
227
// Slow path for HLoadString: resolves the string via pResolveString and
// moves the result (returned in R0) into the instruction's output.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be among the saved/restored live registers.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: string index in reg 0, current method in reg 1.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
256
257class TypeCheckSlowPathARM : public SlowPathCodeARM {
258 public:
259  TypeCheckSlowPathARM(HInstruction* instruction,
260                       Location class_to_check,
261                       Location object_class,
262                       uint32_t dex_pc)
263      : instruction_(instruction),
264        class_to_check_(class_to_check),
265        object_class_(object_class),
266        dex_pc_(dex_pc) {}
267
268  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
269    LocationSummary* locations = instruction_->GetLocations();
270    DCHECK(instruction_->IsCheckCast()
271           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
272
273    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
274    __ Bind(GetEntryLabel());
275    codegen->SaveLiveRegisters(locations);
276
277    // We're moving two locations to locations that could overlap, so we need a parallel
278    // move resolver.
279    InvokeRuntimeCallingConvention calling_convention;
280    codegen->EmitParallelMoves(
281        class_to_check_,
282        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
283        object_class_,
284        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
285
286    if (instruction_->IsInstanceOf()) {
287      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
288      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
289    } else {
290      DCHECK(instruction_->IsCheckCast());
291      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
292    }
293
294    codegen->RestoreLiveRegisters(locations);
295    __ b(GetExitLabel());
296  }
297
298 private:
299  HInstruction* const instruction_;
300  const Location class_to_check_;
301  const Location object_class_;
302  uint32_t dex_pc_;
303
304  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
305};
306
#undef __
// From here on, `__` emits through this CodeGeneratorARM's own assembler.
// (The duplicated `#undef __` that used to follow was redundant and has been
// removed.)
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
311
// Maps an HIR IfCondition to the equivalent ARM condition code.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
325
// Maps an HIR IfCondition to the ARM condition code of its logical negation
// (used to branch when the condition does NOT hold).
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
339
// Pretty-prints core register number `reg` to `stream`.
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
343
// Pretty-prints single-precision FPU register number `reg` to `stream`.
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
347
// Spills core register `reg_id` to SP + `stack_index`; returns the number of
// bytes used (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
352
// Reloads core register `reg_id` from SP + `stack_index`; returns the number
// of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
357
// Spills S register `reg_id` to SP + `stack_index`; returns bytes used.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
362
// Reloads S register `reg_id` from SP + `stack_index`; returns bytes consumed.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
367
// Constructs the ARM code generator, registering the callee-save masks built
// from kCoreCalleeSaves / kFpuCalleeSaves with the base CodeGenerator.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      // NOTE(review): the meaning of this boolean assembler flag is not
      // visible in this file — confirm against ArmAssembler's constructor.
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
389
// Picks a free register (or register pair) suitable for `type` for the
// baseline register allocator, marking it and any overlapping pairs blocked.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; block both halves and refresh the
      // pair-blocking table.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting on an even index.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
440
// Marks registers the allocator must never hand out; for baseline, also
// blocks callee-saves except the one baseline register kept available.
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    // Baseline does not spill/restore arbitrary callee-saves, so block them...
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // ...except R5, which is unconditionally saved (see ComputeSpillMask) and
    // therefore remains usable for long operations.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate single-register blocks to the pair table.
  UpdateBlockedPairRegisters();
}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Instruction visitor that emits ARM code through `codegen`'s assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
487static uint32_t LeastSignificantBit(uint32_t mask) {
488  // ffs starts at 1.
489  return ffs(mask) - 1;
490}
491
// Computes which callee-save core and FPU registers this method must spill.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill in every bit between the lowest and highest spilled S register so
    // the spill area is one contiguous vpush/vpop range.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
512
// Emits the method prologue: optional stack-overflow probe, callee-save
// pushes, frame allocation, and the spill of the current method.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small frames can skip the explicit overflow probe.
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  // This path assumes implicit (probe-based) stack overflow checks are on.
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Probe one word at the limit of the reserved stack region; the load
    // faults if the stack is exhausted. Record the PC so the fault maps back
    // to this method's entry.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is contiguous (see ComputeSpillMask), so one vpush works.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  // Store R0 at SP + 0 (kCurrentMethodStackOffset); on entry R0 is expected
  // to hold the current method.
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
539
// Emits the method epilogue, mirroring GenerateFrameEntry.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // core_spill_mask_ includes PC (PC is a listed callee-save and LR was
  // pushed in its slot), so this pop also returns from the method.
  __ PopList(core_spill_mask_);
}
552
// Binds `block`'s label at the current position in the assembler stream.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
556
// Returns the stack slot of a local variable: a double slot for 64-bit
// types, a single slot for 32-bit types. Sub-word types never reach here.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}
580
// Returns the location of the next method argument of type `type`:
// a core register / register pair, an FPU register / pair, or a stack slot
// once registers are exhausted. `stack_index_` advances for every argument
// (even ones passed in registers) so register and stack positions stay in
// sync across the whole argument list.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs consume two consecutive core registers (or a double stack slot).
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The convention must supply consecutive registers for the pair.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When aligned, jump past any S registers already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned S-register pair, past any floats used.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
655
// Returns where a method's return value of type `type` lives:
// R0 (32-bit), R0/R1 (long), S0 (float), S0/S1 (double), nothing for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
685
// Emits a 32-bit move between any combination of core register, S register
// and stack slot. IP serves as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      // Stack-to-stack: bounce through IP.
      DCHECK(source.IsStackSlot()) << source;
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
719
// Emits a 64-bit move between register pairs, FPU register pairs, and
// double stack slots. Pair-to-pair and slot-to-slot moves go through the
// parallel move resolver because the halves may overlap.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // ldrd requires an even-aligned consecutive pair.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not an strd-compatible pair; store the words separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
773
// Moves the value produced by `instruction` into `location` on behalf of
// `move_for`: constants are materialized, locals are loaded from their stack
// slot, temporaries from their temp location, and anything else from the
// instruction's output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do when the value is already where it is wanted.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Materialize in IP, then spill.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Spill the two halves via IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the value must come from the immediately preceding
    // instruction (possibly through a temporary).
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
857
// Loads a quick entrypoint from the Thread register (TR) at
// `entry_point_offset` and calls it, then records the PC so a stack map can be
// emitted for `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only instructions whose locations declare a runtime call (or implicit
  // checks, or methods already known non-leaf) are expected to get here.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
871
// An unconditional branch uses no operands and produces no value.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
875
// Emits the branch for an HGoto, folding in a suspend check when the goto is
// a loop back edge, and eliding the jump when the successor is the next block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: emit it here; the suspend-check helper
    // also emits the branch to `successor`, so we are done.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The entry block's trailing suspend check is generated with the goto.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
897
// The exit block has no operands and generates no value.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
901
902void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
903  UNUSED(exit);
904  if (kIsDebugBuild) {
905    __ Comment("Unreachable");
906    __ bkpt(0);
907  }
908}
909
// An HIf only needs a register input when its condition was materialized (or
// is not an HCondition at all); otherwise the compare is emitted by VisitIf.
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
918
// Emits the conditional branch for an HIf. Three cases: a constant condition
// (branch statically resolved), a materialized condition (compare its output
// against 0), or a non-materialized HCondition (emit the compare inline).
// Branches to the false successor are elided when it is the next block.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Use an immediate operand when the constant can be encoded;
        // otherwise stage it through the scratch register IP.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Fall through to the false successor when possible; otherwise branch.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
970
971
// A condition compares a register with a register or an encodable constant.
// It only produces an output register when materialized (i.e. its boolean
// value is consumed somewhere other than an immediately following HIf).
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
981
// Materializes a condition as 0/1 in its output register using a compare
// followed by a Thumb-2 IT block (conditional moves of 1 and 0).
// Non-materialized conditions are handled at their use site (VisitIf).
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Encode the constant directly when possible, else stage it through IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT <cond> ELSE: mov out, #1 if the condition holds, mov out, #0 otherwise.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1007
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1011
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1015
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1019
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1023
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1027
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1031
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1035
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1039
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1043
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1047
// Delegates to the shared HCondition location setup.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1051
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1055
// A local declaration carries no operands and produces no value.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1059
// No code is generated for locals; they only exist in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1063
// Loading a local needs no locations; Move() reads the stack slot directly.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1067
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1072
// Pins the stored value (input 1) to the local's stack slot, sized by the
// value's primitive type. The actual store is performed by the move emitted
// for the value, so VisitStoreLocal's codegen is empty.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1096
// No code: the store happens through the input's location constraint.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1100
// Constants live in a ConstantLocation and are materialized at their use site.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1106
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1111
// Constants live in a ConstantLocation and are materialized at their use site.
void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1117
void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1122
// Constants live in a ConstantLocation and are materialized at their use site.
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1128
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1133
// Constants live in a ConstantLocation and are materialized at their use site.
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1139
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1144
// Constants live in a ConstantLocation and are materialized at their use site.
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1150
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1155
// A void return has no operands.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1159
// Tears down the frame and returns; no value to place.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1164
// Pins the returned value to the ABI return location for its type.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1170
// The value is already in the return location (see VisitReturn above);
// just tear down the frame and return.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1175
// Lets the intrinsics framework claim the invoke first; otherwise falls back
// to the generic calling-convention setup in HandleInvoke.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1185
// Loads the ArtMethod* of the current method from its frame slot into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1190
1191static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1192  if (invoke->GetLocations()->Intrinsified()) {
1193    IntrinsicCodeGeneratorARM intrinsic(codegen);
1194    intrinsic.Dispatch(invoke);
1195    return true;
1196  }
1197  return false;
1198}
1199
// Generates an intrinsic when one was set up; otherwise emits the standard
// static/direct call through temp register 0.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
}
1209
1210void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1211  LocationSummary* locations =
1212      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1213  locations->AddTemp(Location::RegisterLocation(R0));
1214
1215  InvokeDexCallingConventionVisitor calling_convention_visitor;
1216  for (size_t i = 0; i < invoke->InputCount(); i++) {
1217    HInstruction* input = invoke->InputAt(i);
1218    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1219  }
1220
1221  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1222}
1223
// Lets the intrinsics framework claim the invoke first; otherwise falls back
// to the generic calling-convention setup in HandleInvoke.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1233
// Emits a virtual dispatch: load the receiver's class, index into its
// embedded vtable, and call through the method's quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1264
// Interface calls use the generic setup plus R12 as the hidden argument
// carrying the target method's dex index for conflict resolution.
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1270
// Emits an interface dispatch: set the hidden dex-method-index argument, load
// the receiver's class, index into the embedded IMT (modulo its size), and
// call through the resolved method's quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1303
// Location setup for arithmetic negation, keyed on the result type.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // Output must overlap: the long negation below writes out.lo before it
      // has finished reading in.hi.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1329
// Emits arithmetic negation. Ints use RSB, longs a three-instruction
// RSBS/SBC/SUB sequence (Thumb-2 has no RSC), and floats/doubles VNEG.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1376
1377void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1378  Primitive::Type result_type = conversion->GetResultType();
1379  Primitive::Type input_type = conversion->GetInputType();
1380  DCHECK_NE(result_type, input_type);
1381
1382  // The float-to-long and double-to-long type conversions rely on a
1383  // call to the runtime.
1384  LocationSummary::CallKind call_kind =
1385      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1386       && result_type == Primitive::kPrimLong)
1387      ? LocationSummary::kCall
1388      : LocationSummary::kNoCall;
1389  LocationSummary* locations =
1390      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1391
1392  switch (result_type) {
1393    case Primitive::kPrimByte:
1394      switch (input_type) {
1395        case Primitive::kPrimShort:
1396        case Primitive::kPrimInt:
1397        case Primitive::kPrimChar:
1398          // Processing a Dex `int-to-byte' instruction.
1399          locations->SetInAt(0, Location::RequiresRegister());
1400          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1401          break;
1402
1403        default:
1404          LOG(FATAL) << "Unexpected type conversion from " << input_type
1405                     << " to " << result_type;
1406      }
1407      break;
1408
1409    case Primitive::kPrimShort:
1410      switch (input_type) {
1411        case Primitive::kPrimByte:
1412        case Primitive::kPrimInt:
1413        case Primitive::kPrimChar:
1414          // Processing a Dex `int-to-short' instruction.
1415          locations->SetInAt(0, Location::RequiresRegister());
1416          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1417          break;
1418
1419        default:
1420          LOG(FATAL) << "Unexpected type conversion from " << input_type
1421                     << " to " << result_type;
1422      }
1423      break;
1424
1425    case Primitive::kPrimInt:
1426      switch (input_type) {
1427        case Primitive::kPrimLong:
1428          // Processing a Dex `long-to-int' instruction.
1429          locations->SetInAt(0, Location::Any());
1430          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1431          break;
1432
1433        case Primitive::kPrimFloat:
1434          // Processing a Dex `float-to-int' instruction.
1435          locations->SetInAt(0, Location::RequiresFpuRegister());
1436          locations->SetOut(Location::RequiresRegister());
1437          locations->AddTemp(Location::RequiresFpuRegister());
1438          break;
1439
1440        case Primitive::kPrimDouble:
1441          // Processing a Dex `double-to-int' instruction.
1442          locations->SetInAt(0, Location::RequiresFpuRegister());
1443          locations->SetOut(Location::RequiresRegister());
1444          locations->AddTemp(Location::RequiresFpuRegister());
1445          break;
1446
1447        default:
1448          LOG(FATAL) << "Unexpected type conversion from " << input_type
1449                     << " to " << result_type;
1450      }
1451      break;
1452
1453    case Primitive::kPrimLong:
1454      switch (input_type) {
1455        case Primitive::kPrimByte:
1456        case Primitive::kPrimShort:
1457        case Primitive::kPrimInt:
1458        case Primitive::kPrimChar:
1459          // Processing a Dex `int-to-long' instruction.
1460          locations->SetInAt(0, Location::RequiresRegister());
1461          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1462          break;
1463
1464        case Primitive::kPrimFloat: {
1465          // Processing a Dex `float-to-long' instruction.
1466          InvokeRuntimeCallingConvention calling_convention;
1467          locations->SetInAt(0, Location::FpuRegisterLocation(
1468              calling_convention.GetFpuRegisterAt(0)));
1469          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1470          break;
1471        }
1472
1473        case Primitive::kPrimDouble: {
1474          // Processing a Dex `double-to-long' instruction.
1475          InvokeRuntimeCallingConvention calling_convention;
1476          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1477              calling_convention.GetFpuRegisterAt(0),
1478              calling_convention.GetFpuRegisterAt(1)));
1479          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1480          break;
1481        }
1482
1483        default:
1484          LOG(FATAL) << "Unexpected type conversion from " << input_type
1485                     << " to " << result_type;
1486      }
1487      break;
1488
1489    case Primitive::kPrimChar:
1490      switch (input_type) {
1491        case Primitive::kPrimByte:
1492        case Primitive::kPrimShort:
1493        case Primitive::kPrimInt:
1494          // Processing a Dex `int-to-char' instruction.
1495          locations->SetInAt(0, Location::RequiresRegister());
1496          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1497          break;
1498
1499        default:
1500          LOG(FATAL) << "Unexpected type conversion from " << input_type
1501                     << " to " << result_type;
1502      }
1503      break;
1504
1505    case Primitive::kPrimFloat:
1506      switch (input_type) {
1507        case Primitive::kPrimByte:
1508        case Primitive::kPrimShort:
1509        case Primitive::kPrimInt:
1510        case Primitive::kPrimChar:
1511          // Processing a Dex `int-to-float' instruction.
1512          locations->SetInAt(0, Location::RequiresRegister());
1513          locations->SetOut(Location::RequiresFpuRegister());
1514          break;
1515
1516        case Primitive::kPrimLong:
1517          // Processing a Dex `long-to-float' instruction.
1518          locations->SetInAt(0, Location::RequiresRegister());
1519          locations->SetOut(Location::RequiresFpuRegister());
1520          locations->AddTemp(Location::RequiresRegister());
1521          locations->AddTemp(Location::RequiresRegister());
1522          locations->AddTemp(Location::RequiresFpuRegister());
1523          locations->AddTemp(Location::RequiresFpuRegister());
1524          break;
1525
1526        case Primitive::kPrimDouble:
1527          // Processing a Dex `double-to-float' instruction.
1528          locations->SetInAt(0, Location::RequiresFpuRegister());
1529          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1530          break;
1531
1532        default:
1533          LOG(FATAL) << "Unexpected type conversion from " << input_type
1534                     << " to " << result_type;
1535      };
1536      break;
1537
1538    case Primitive::kPrimDouble:
1539      switch (input_type) {
1540        case Primitive::kPrimByte:
1541        case Primitive::kPrimShort:
1542        case Primitive::kPrimInt:
1543        case Primitive::kPrimChar:
1544          // Processing a Dex `int-to-double' instruction.
1545          locations->SetInAt(0, Location::RequiresRegister());
1546          locations->SetOut(Location::RequiresFpuRegister());
1547          break;
1548
1549        case Primitive::kPrimLong:
1550          // Processing a Dex `long-to-double' instruction.
1551          locations->SetInAt(0, Location::RequiresRegister());
1552          locations->SetOut(Location::RequiresFpuRegister());
1553          locations->AddTemp(Location::RequiresRegister());
1554          locations->AddTemp(Location::RequiresRegister());
1555          locations->AddTemp(Location::RequiresFpuRegister());
1556          break;
1557
1558        case Primitive::kPrimFloat:
1559          // Processing a Dex `float-to-double' instruction.
1560          locations->SetInAt(0, Location::RequiresFpuRegister());
1561          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1562          break;
1563
1564        default:
1565          LOG(FATAL) << "Unexpected type conversion from " << input_type
1566                     << " to " << result_type;
1567      };
1568      break;
1569
1570    default:
1571      LOG(FATAL) << "Unexpected type conversion from " << input_type
1572                 << " to " << result_type;
1573  }
1574}
1575
// Emits machine code for a Dex type conversion. The (result, input) type pair
// selects the strategy: signed/unsigned bitfield extracts for narrowing to
// byte/short/char, register moves plus sign extension for int<->long, VFP
// convert instructions for int/float/double conversions, and runtime calls
// (pF2l/pD2l) for float/double-to-long. Temps used below were reserved by the
// corresponding locations-builder visitor.
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Signed bitfield extract of the low 8 bits == sign extension.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Signed bitfield extract of the low 16 bits == sign extension.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Truncation: only the low 32 bits of the long are kept.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            // Low word of the 64-bit stack slot (little-endian layout).
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FP temp, then move the result to the core register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          // High word = low word >> 31 (arithmetic), i.e. all sign bits.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Unsigned bitfield extract: char is zero-extended 16-bit.
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same high * 2^32 + unsigned(low) scheme as long-to-float above,
          // except the result stays in the double output register.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1835
1836void LocationsBuilderARM::VisitAdd(HAdd* add) {
1837  LocationSummary* locations =
1838      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1839  switch (add->GetResultType()) {
1840    case Primitive::kPrimInt: {
1841      locations->SetInAt(0, Location::RequiresRegister());
1842      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1843      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1844      break;
1845    }
1846
1847    case Primitive::kPrimLong: {
1848      locations->SetInAt(0, Location::RequiresRegister());
1849      locations->SetInAt(1, Location::RequiresRegister());
1850      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1851      break;
1852    }
1853
1854    case Primitive::kPrimFloat:
1855    case Primitive::kPrimDouble: {
1856      locations->SetInAt(0, Location::RequiresFpuRegister());
1857      locations->SetInAt(1, Location::RequiresFpuRegister());
1858      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1859      break;
1860    }
1861
1862    default:
1863      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1864  }
1865}
1866
// Emits code for HAdd: plain add (register or immediate form) for int, an
// adds/adc carry chain for long, and VFP adds for float/double.
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // AddConstant handles immediates that don't fit a ShifterOperand.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // Low words with flags-setting add, high words add-with-carry.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat:
      __ vadds(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1912
1913void LocationsBuilderARM::VisitSub(HSub* sub) {
1914  LocationSummary* locations =
1915      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1916  switch (sub->GetResultType()) {
1917    case Primitive::kPrimInt: {
1918      locations->SetInAt(0, Location::RequiresRegister());
1919      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1920      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1921      break;
1922    }
1923
1924    case Primitive::kPrimLong: {
1925      locations->SetInAt(0, Location::RequiresRegister());
1926      locations->SetInAt(1, Location::RequiresRegister());
1927      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1928      break;
1929    }
1930    case Primitive::kPrimFloat:
1931    case Primitive::kPrimDouble: {
1932      locations->SetInAt(0, Location::RequiresFpuRegister());
1933      locations->SetInAt(1, Location::RequiresFpuRegister());
1934      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1935      break;
1936    }
1937    default:
1938      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1939  }
1940}
1941
// Emits code for HSub: sub (or add of the negated constant) for int, a
// subs/sbc borrow chain for long, and VFP subtracts for float/double.
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Subtract a constant by adding its negation.
        // NOTE(review): negating the constant overflows for INT32_MIN
        // (undefined behavior in C++) — confirm that constant folding or an
        // earlier pass rules that value out here.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // Low words with flags-setting subtract, high words subtract-with-carry.
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1991
1992void LocationsBuilderARM::VisitMul(HMul* mul) {
1993  LocationSummary* locations =
1994      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1995  switch (mul->GetResultType()) {
1996    case Primitive::kPrimInt:
1997    case Primitive::kPrimLong:  {
1998      locations->SetInAt(0, Location::RequiresRegister());
1999      locations->SetInAt(1, Location::RequiresRegister());
2000      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2001      break;
2002    }
2003
2004    case Primitive::kPrimFloat:
2005    case Primitive::kPrimDouble: {
2006      locations->SetInAt(0, Location::RequiresFpuRegister());
2007      locations->SetInAt(1, Location::RequiresFpuRegister());
2008      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2009      break;
2010    }
2011
2012    default:
2013      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2014  }
2015}
2016
// Emits code for HMul. The long case synthesizes a 64x64->64 multiply from
// 32-bit mul/mla/umull, using IP as scratch; see the inline formula below.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // umull writes the full 64-bit product: low half to out_lo, high to IP.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2078
2079void LocationsBuilderARM::VisitDiv(HDiv* div) {
2080  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2081      ? LocationSummary::kCall
2082      : LocationSummary::kNoCall;
2083  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2084
2085  switch (div->GetResultType()) {
2086    case Primitive::kPrimInt: {
2087      locations->SetInAt(0, Location::RequiresRegister());
2088      locations->SetInAt(1, Location::RequiresRegister());
2089      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2090      break;
2091    }
2092    case Primitive::kPrimLong: {
2093      InvokeRuntimeCallingConvention calling_convention;
2094      locations->SetInAt(0, Location::RegisterPairLocation(
2095          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2096      locations->SetInAt(1, Location::RegisterPairLocation(
2097          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2098      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2099      break;
2100    }
2101    case Primitive::kPrimFloat:
2102    case Primitive::kPrimDouble: {
2103      locations->SetInAt(0, Location::RequiresFpuRegister());
2104      locations->SetInAt(1, Location::RequiresFpuRegister());
2105      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2106      break;
2107    }
2108
2109    default:
2110      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2111  }
2112}
2113
// Emits code for HDiv: hardware sdiv for int, the pLdiv runtime entry point
// for long (operands pre-pinned to the calling convention by the locations
// builder), and VFP divides for float/double.
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      __ sdiv(out.AsRegister<Register>(),
              first.AsRegister<Register>(),
              second.AsRegister<Register>());
      break;
    }

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      // Verify the register allocator honored the pinned locations.
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2159
2160void LocationsBuilderARM::VisitRem(HRem* rem) {
2161  Primitive::Type type = rem->GetResultType();
2162  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2163      ? LocationSummary::kNoCall
2164      : LocationSummary::kCall;
2165  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2166
2167  switch (type) {
2168    case Primitive::kPrimInt: {
2169      locations->SetInAt(0, Location::RequiresRegister());
2170      locations->SetInAt(1, Location::RequiresRegister());
2171      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2172      locations->AddTemp(Location::RequiresRegister());
2173      break;
2174    }
2175    case Primitive::kPrimLong: {
2176      InvokeRuntimeCallingConvention calling_convention;
2177      locations->SetInAt(0, Location::RegisterPairLocation(
2178          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2179      locations->SetInAt(1, Location::RegisterPairLocation(
2180          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2181      // The runtime helper puts the output in R2,R3.
2182      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2183      break;
2184    }
2185    case Primitive::kPrimFloat: {
2186      InvokeRuntimeCallingConvention calling_convention;
2187      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2188      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2189      locations->SetOut(Location::FpuRegisterLocation(S0));
2190      break;
2191    }
2192
2193    case Primitive::kPrimDouble: {
2194      InvokeRuntimeCallingConvention calling_convention;
2195      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2196          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2197      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2198          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2199      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2200      break;
2201    }
2202
2203    default:
2204      LOG(FATAL) << "Unexpected rem type " << type;
2205  }
2206}
2207
// Emits code for HRem. Integer remainder is computed inline as
// reg1 - (reg1 / reg2) * reg2; long/float/double remainders call the
// pLmod/pFmodf/pFmod runtime entry points, whose operands and results were
// pinned to the calling convention by LocationsBuilderARM::VisitRem.
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register reg1 = first.AsRegister<Register>();
      Register reg2 = second.AsRegister<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();

      // temp = reg1 / reg2  (integer division)
      // temp = temp * reg2
      // dest = reg1 - temp
      __ sdiv(temp, reg1, reg2);
      __ mul(temp, temp, reg2);
      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      break;
    }

    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
2249
2250void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2251  LocationSummary* locations =
2252      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2253  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2254  if (instruction->HasUses()) {
2255    locations->SetOut(Location::SameAsFirstInput());
2256  }
2257}
2258
2259void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2260  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2261  codegen_->AddSlowPath(slow_path);
2262
2263  LocationSummary* locations = instruction->GetLocations();
2264  Location value = locations->InAt(0);
2265
2266  switch (instruction->GetType()) {
2267    case Primitive::kPrimInt: {
2268      if (value.IsRegister()) {
2269        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2270        __ b(slow_path->GetEntryLabel(), EQ);
2271      } else {
2272        DCHECK(value.IsConstant()) << value;
2273        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2274          __ b(slow_path->GetEntryLabel());
2275        }
2276      }
2277      break;
2278    }
2279    case Primitive::kPrimLong: {
2280      if (value.IsRegisterPair()) {
2281        __ orrs(IP,
2282                value.AsRegisterPairLow<Register>(),
2283                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2284        __ b(slow_path->GetEntryLabel(), EQ);
2285      } else {
2286        DCHECK(value.IsConstant()) << value;
2287        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2288          __ b(slow_path->GetEntryLabel());
2289        }
2290      }
2291      break;
2292    default:
2293      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2294    }
2295  }
2296}
2297
2298void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2299  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2300
2301  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2302      ? LocationSummary::kCall
2303      : LocationSummary::kNoCall;
2304  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2305
2306  switch (op->GetResultType()) {
2307    case Primitive::kPrimInt: {
2308      locations->SetInAt(0, Location::RequiresRegister());
2309      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2310      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2311      break;
2312    }
2313    case Primitive::kPrimLong: {
2314      InvokeRuntimeCallingConvention calling_convention;
2315      locations->SetInAt(0, Location::RegisterPairLocation(
2316          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2317      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2318      // The runtime helper puts the output in R0,R1.
2319      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2320      break;
2321    }
2322    default:
2323      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2324  }
2325}
2326
// Shared code generation for Shl/Shr/UShr. 32-bit shifts are emitted inline
// with the count masked to the Java range; 64-bit shifts call the
// pShlLong/pShrLong/pUshrLong runtime helpers.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): this masks the count in place, clobbering
        // `second_reg` — confirm the register allocator does not expect the
        // input register's value to be preserved across this instruction.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      InvokeRuntimeCallingConvention calling_convention;
      // Verify the register allocator honored the pinned locations.
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // NOTE(review): unlike InvokeRuntime, this direct load+blx does not
      // record pc info — confirm these helpers can never throw/deopt.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2391
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // Shl shares its location constraints with Shr/UShr; see HandleShift.
  HandleShift(shl);
}
2395
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation is shared with Shr/UShr; see HandleShift.
  HandleShift(shl);
}
2399
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // Shr shares its location constraints with Shl/UShr; see HandleShift.
  HandleShift(shr);
}
2403
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation is shared with Shl/UShr; see HandleShift.
  HandleShift(shr);
}
2407
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // UShr shares its location constraints with Shl/Shr; see HandleShift.
  HandleShift(ushr);
}
2411
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation is shared with Shl/Shr; see HandleShift.
  HandleShift(ushr);
}
2415
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  // NewInstance always calls into the runtime, so locations follow the
  // runtime calling convention.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Reserve the first two argument registers; the code generator fills them
  // with the type index and the current method (see VisitNewInstance below).
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  // The runtime returns the new object in R0.
  locations->SetOut(Location::RegisterLocation(R0));
}
2424
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Argument 1: the current method.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  // Argument 0: the type index of the class to instantiate.
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // Call the allocation entrypoint selected at graph-building time.
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2433
void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  // NewArray always calls into the runtime, so locations follow the
  // runtime calling convention.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Reserve argument registers 0 and 2; the code generator fills them with
  // the type index and the current method (see VisitNewArray below).
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  // The runtime returns the new array in R0.
  locations->SetOut(Location::RegisterLocation(R0));
  // The instruction's HIR input goes directly into argument register 1.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
2443
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Argument 2: the current method. Argument 1 already holds the HIR input
  // (see the locations builder).
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  // Argument 0: the type index of the array class.
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // Call the allocation entrypoint selected at graph-building time.
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2452
2453void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2454  LocationSummary* locations =
2455      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2456  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2457  if (location.IsStackSlot()) {
2458    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2459  } else if (location.IsDoubleStackSlot()) {
2460    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2461  }
2462  locations->SetOut(location);
2463}
2464
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2469
void LocationsBuilderARM::VisitNot(HNot* not_) {
  // Bitwise not: input and output both in core registers; the output may
  // share the input's register(s) since each word is read before written.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2476
2477void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2478  LocationSummary* locations = not_->GetLocations();
2479  Location out = locations->Out();
2480  Location in = locations->InAt(0);
2481  switch (not_->GetResultType()) {
2482    case Primitive::kPrimInt:
2483      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2484      break;
2485
2486    case Primitive::kPrimLong:
2487      __ mvn(out.AsRegisterPairLow<Register>(),
2488             ShifterOperand(in.AsRegisterPairLow<Register>()));
2489      __ mvn(out.AsRegisterPairHigh<Register>(),
2490             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2491      break;
2492
2493    default:
2494      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2495  }
2496}
2497
2498void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2499  LocationSummary* locations =
2500      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2501  switch (compare->InputAt(0)->GetType()) {
2502    case Primitive::kPrimLong: {
2503      locations->SetInAt(0, Location::RequiresRegister());
2504      locations->SetInAt(1, Location::RequiresRegister());
2505      // Output overlaps because it is written before doing the low comparison.
2506      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2507      break;
2508    }
2509    case Primitive::kPrimFloat:
2510    case Primitive::kPrimDouble: {
2511      locations->SetInAt(0, Location::RequiresFpuRegister());
2512      locations->SetInAt(1, Location::RequiresFpuRegister());
2513      locations->SetOut(Location::RequiresRegister());
2514      break;
2515    }
2516    default:
2517      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2518  }
2519}
2520
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  // Materializes the comparison result (-1, 0 or 1) into `out`.
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first (signed); only when they are equal do
      // the flags fall through to the unsigned low-word compare below.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Load the "equal" result before the FP compare, as LoadImmediate may
      // clobber the status flags.
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Here the flags reflect the low-word (long) or FP compare, and `out` is 0.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2569
2570void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2571  LocationSummary* locations =
2572      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2573  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2574    locations->SetInAt(i, Location::Any());
2575  }
2576  locations->SetOut(Location::Any());
2577}
2578
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis are resolved before code generation; emitting code for one is a bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2583
2584void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2585  // TODO (ported from quick): revisit Arm barrier kinds
2586  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2587  switch (kind) {
2588    case MemBarrierKind::kAnyStore:
2589    case MemBarrierKind::kLoadAny:
2590    case MemBarrierKind::kAnyAny: {
2591      flavour = DmbOptions::ISH;
2592      break;
2593    }
2594    case MemBarrierKind::kStoreStore: {
2595      flavour = DmbOptions::ISHST;
2596      break;
2597    }
2598    default:
2599      LOG(FATAL) << "Unexpected memory barrier " << kind;
2600  }
2601  __ dmb(flavour);
2602}
2603
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  // 64-bit atomic load via an exclusive pair load (ldrexd).
  if (offset != 0) {
    // ldrexd takes no offset; fold it into IP first. out_lo is safe to use
    // as the scratch register here since it is only written at the end.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2615
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  // 64-bit atomic store via an ldrexd/strexd retry loop.
  Label fail;
  if (offset != 0) {
    // strexd takes no offset; fold it into IP first.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 to temp1 on success; loop until the exclusive store wins.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2638
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  // Common location constraints for instance and static field stores.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  // A 64-bit volatile store on a core without atomic ldrd/strd needs the
  // ldrexd/strexd loop, which requires extra temporaries (see below).
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2675
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Common code generation for instance and static field stores.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Volatile store: barrier before the store orders prior accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy atomic 64-bit store: use the ldrexd/strexd loop.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Copy the FP value into core registers, then store atomically with
        // the ldrexd/strexd loop (temps 2/3 were reserved for the loop).
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // Volatile store: barrier after the store orders it before later accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2773
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  // Common location constraints for instance and static field loads.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // A volatile double load without atomic ldrd needs the ldrexd path and
  // two core temporaries (reserved below).
  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2797
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Common code generation for instance and static field loads.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy atomic 64-bit load: use the exclusive pair load.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into core temporaries, then move into the FP pair.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Volatile load: barrier after the load orders it before later accesses.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2882
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field stores; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2886
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field stores; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2890
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field loads; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2894
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field loads; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2898
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field loads; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2902
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field loads; see HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2906
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field stores; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2910
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field stores; see HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2914
void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  // The checked reference must be in a register for both implicit and
  // explicit null checks.
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    // Users of the null check read the checked reference; keep it in place.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2923
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a user instruction performs a faulting access itself, no code is needed.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Dummy load that faults when `obj` is null; the recorded pc info maps the
  // fault back to this instruction's dex pc.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2933
2934void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
2935  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2936  codegen_->AddSlowPath(slow_path);
2937
2938  LocationSummary* locations = instruction->GetLocations();
2939  Location obj = locations->InAt(0);
2940
2941  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2942  __ b(slow_path->GetEntryLabel(), EQ);
2943}
2944
2945void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2946  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2947    GenerateImplicitNullCheck(instruction);
2948  } else {
2949    GenerateExplicitNullCheck(instruction);
2950  }
2951}
2952
void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // A constant index folds into the load's immediate offset.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2960
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  // Loads an array element. For a constant index the element address folds
  // into the load's immediate offset; otherwise the scaled index is added
  // into IP first.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit element: load into the output register pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3087
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // A store that needs a type check is done through a runtime call.
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    // Array, index and value go in the first three argument registers.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    // A constant index folds into the store's immediate offset.
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
3114
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  // Emits the store of a value into an array element.  Reference stores that
  // need a type check are delegated to the pAputObject runtime helper; all
  // other stores are emitted inline, with a GC card mark after an inline
  // reference store.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        // Constant index: fold it into the immediate offset of the store.
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Variable index: materialize the element address in IP first.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        // Scale the index by the 2-byte element size via LSL.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the implicit null check on the store just emitted, before any
        // card-mark code, so a fault maps back to this instruction.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Reference store needing a type check: call the runtime helper.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // strd-style store of the whole pair through its low register.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch (their null check must precede
  // the card mark / runtime call); every other type records it here.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3238
3239void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3240  LocationSummary* locations =
3241      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3242  locations->SetInAt(0, Location::RequiresRegister());
3243  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3244}
3245
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  // Loads the array's length field; the load itself doubles as the null check
  // when implicit null checks are enabled.
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3254
3255void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3256  LocationSummary* locations =
3257      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3258  locations->SetInAt(0, Location::RequiresRegister());
3259  locations->SetInAt(1, Location::RequiresRegister());
3260  if (instruction->HasUses()) {
3261    locations->SetOut(Location::SameAsFirstInput());
3262  }
3263}
3264
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  // Branches to the bounds-check slow path unless 0 <= index < length.
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  // CS is the unsigned >= condition: a negative index wraps to a large
  // unsigned value, so one compare catches both out-of-range cases.
  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}
3277
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  // Marks the card covering `object` as dirty after a reference store, so the
  // GC rescans it.  Skipped entirely when the stored `value` is null.
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // Card-table base, read from the current thread (TR).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Stores the low byte of `card` at card_base + (object >> kCardShift);
  // presumably the base's low byte doubles as the dirty-card value.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3286
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no location summary of their own.
  temp->SetLocations(nullptr);
}
3290
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3295
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves never reach the locations builder; reaching here is a bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3300
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  // Delegate emission to the parallel-move resolver.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3304
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No inputs or outputs; the suspend request is serviced on a slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3308
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // Emits an explicit suspend check only when neither the loop back edge nor
  // the entry block's goto will already emit one.
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
3322
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  // Tests the thread's flags halfword and enters the suspend slow path when
  // any flag is set.  With a non-null `successor` (back edge), the slow path
  // rejoins at that block; otherwise execution resumes at the return label.
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3341
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  // The resolver emits code through the code generator's assembler.
  return codegen_->GetAssembler();
}
3345
void ParallelMoveResolverARM::EmitMove(size_t index) {
  // Emits one resolved parallel move: copies `source` into `destination` for
  // every supported combination of core register, FP register, register pair,
  // stack slot, and constant.  IP and DTMP serve as scratch for memory-to-
  // memory moves and constant materialization.
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register -> core register or 32-bit stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot -> core register, FP register, or stack slot (via IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // Single-precision FP register -> FP register or 32-bit stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot -> stack slot (via DTMP), core pair, or FP pair.
    if (destination.IsDoubleStackSlot()) {
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      // ldrd-style load of the whole pair through its low register.
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair -> register pair or 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // Double-precision FP pair -> FP pair or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant -> register(s) or stack slot(s), materialized via immediates.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the 64-bit constant as two 32-bit words through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Store the double's raw bit pattern as two 32-bit words.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3460
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  // Swaps a core register with a 32-bit stack slot, using IP as scratch.
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3466
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  // Swaps two 32-bit stack slots using IP plus one allocatable scratch
  // register.  If that scratch had to be spilled, its push moved SP down one
  // word, so both slot offsets are rebased by kArmWordSize.
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3477
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  // Emits an exchange of two locations (used to break parallel-move cycles).
  // IP is the scratch for 32-bit swaps, DTMP for 64-bit ones.
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core <-> core via IP, so neither operand may itself be IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // S-register <-> S-register via IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core pair <-> core pair, parking one pair in DTMP.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> 64-bit stack slot (either direction).
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // D-register <-> D-register via DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // D-register <-> 64-bit stack slot (either direction).
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // S-register <-> 32-bit stack slot (either direction) via IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot <-> 64-bit slot, one 32-bit word at a time.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3550
void ParallelMoveResolverARM::SpillScratch(int reg) {
  // Saves the scratch register on the stack so it can be borrowed.
  __ Push(static_cast<Register>(reg));
}
3554
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  // Restores a scratch register previously saved by SpillScratch.
  __ Pop(static_cast<Register>(reg));
}
3558
3559void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3560  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3561      ? LocationSummary::kCallOnSlowPath
3562      : LocationSummary::kNoCall;
3563  LocationSummary* locations =
3564      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3565  locations->SetOut(Location::RequiresRegister());
3566}
3567
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  // Loads a java.lang.Class reference into `out`.  The referrer's own class
  // comes straight off the current ArtMethod; any other class is read from
  // the dex cache, with a slow path for resolution and (optionally) the
  // class-initialization check.
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current method -> resolved-types cache -> cache[type_index].
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3594
3595void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3596  LocationSummary* locations =
3597      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3598  locations->SetInAt(0, Location::RequiresRegister());
3599  if (check->HasUses()) {
3600    locations->SetOut(Location::SameAsFirstInput());
3601  }
3602}
3603
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // Ensures the class in input 0 is initialized, running <clinit> on the
  // slow path if needed.
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
3612
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  // Branches to `slow_path` unless the status of the class in `class_reg` is
  // at least kStatusInitialized; binds the slow path's exit label at the end.
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3623
3624void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3625  LocationSummary* locations =
3626      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3627  locations->SetOut(Location::RequiresRegister());
3628}
3629
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  // Loads a java.lang.String from the declaring class's dex string cache,
  // taking the slow path when the cache entry is still null (unresolved).
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current method -> declaring class -> dex cache strings
  //       -> cache[string_index].
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3643
3644void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3645  LocationSummary* locations =
3646      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3647  locations->SetOut(Location::RequiresRegister());
3648}
3649
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  // Reads the pending exception from the current thread (TR) into `out`,
  // then clears the thread's exception slot by storing zero back.
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3657
3658void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3659  LocationSummary* locations =
3660      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3661  InvokeRuntimeCallingConvention calling_convention;
3662  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3663}
3664
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  // Delegates entirely to the pDeliverException runtime entry point.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3669
3670void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3671  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3672      ? LocationSummary::kNoCall
3673      : LocationSummary::kCallOnSlowPath;
3674  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3675  locations->SetInAt(0, Location::RequiresRegister());
3676  locations->SetInAt(1, Location::RequiresRegister());
3677  // The out register is used as a temporary, so it overlaps with the inputs.
3678  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3679}
3680
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  // Computes `obj instanceof cls` into `out` (0 or 1).  For final classes a
  // direct class compare suffices; otherwise a mismatch falls through to the
  // type-check slow path.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Failure (and null) path: produce 0.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3719
3720void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3721  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3722      instruction, LocationSummary::kCallOnSlowPath);
3723  locations->SetInAt(0, Location::RequiresRegister());
3724  locations->SetInAt(1, Location::RequiresRegister());
3725  locations->AddTemp(Location::RequiresRegister());
3726}
3727
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  // Emits the check-cast: a null object or an exact class match passes;
  // anything else is decided (and may throw) on the slow path.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null reference passes any cast: jump straight to the exit label.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3748
3749void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3750  LocationSummary* locations =
3751      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3752  InvokeRuntimeCallingConvention calling_convention;
3753  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3754}
3755
3756void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3757  codegen_->InvokeRuntime(instruction->IsEnter()
3758        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3759      instruction,
3760      instruction->GetDexPc());
3761}
3762
// Bitwise And/Or/Xor share identical register constraints; delegate to the
// common handler.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3766
3767void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3768  LocationSummary* locations =
3769      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3770  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3771         || instruction->GetResultType() == Primitive::kPrimLong);
3772  locations->SetInAt(0, Location::RequiresRegister());
3773  locations->SetInAt(1, Location::RequiresRegister());
3774  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3775}
3776
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  // Delegates to the shared And/Or/Xor code generator.
  HandleBitwiseOperation(instruction);
}
3780
void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  // Delegates to the shared And/Or/Xor code generator.
  HandleBitwiseOperation(instruction);
}
3784
void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  // Delegates to the shared And/Or/Xor code generator.
  HandleBitwiseOperation(instruction);
}
3788
3789void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3790  LocationSummary* locations = instruction->GetLocations();
3791
3792  if (instruction->GetResultType() == Primitive::kPrimInt) {
3793    Register first = locations->InAt(0).AsRegister<Register>();
3794    Register second = locations->InAt(1).AsRegister<Register>();
3795    Register out = locations->Out().AsRegister<Register>();
3796    if (instruction->IsAnd()) {
3797      __ and_(out, first, ShifterOperand(second));
3798    } else if (instruction->IsOr()) {
3799      __ orr(out, first, ShifterOperand(second));
3800    } else {
3801      DCHECK(instruction->IsXor());
3802      __ eor(out, first, ShifterOperand(second));
3803    }
3804  } else {
3805    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3806    Location first = locations->InAt(0);
3807    Location second = locations->InAt(1);
3808    Location out = locations->Out();
3809    if (instruction->IsAnd()) {
3810      __ and_(out.AsRegisterPairLow<Register>(),
3811              first.AsRegisterPairLow<Register>(),
3812              ShifterOperand(second.AsRegisterPairLow<Register>()));
3813      __ and_(out.AsRegisterPairHigh<Register>(),
3814              first.AsRegisterPairHigh<Register>(),
3815              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3816    } else if (instruction->IsOr()) {
3817      __ orr(out.AsRegisterPairLow<Register>(),
3818             first.AsRegisterPairLow<Register>(),
3819             ShifterOperand(second.AsRegisterPairLow<Register>()));
3820      __ orr(out.AsRegisterPairHigh<Register>(),
3821             first.AsRegisterPairHigh<Register>(),
3822             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3823    } else {
3824      DCHECK(instruction->IsXor());
3825      __ eor(out.AsRegisterPairLow<Register>(),
3826             first.AsRegisterPairLow<Register>(),
3827             ShifterOperand(second.AsRegisterPairLow<Register>()));
3828      __ eor(out.AsRegisterPairHigh<Register>(),
3829             first.AsRegisterPairHigh<Register>(),
3830             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3831    }
3832  }
3833}
3834
// Emits the call sequence for a static or direct (non-virtual) invoke.
// |temp| must be the ART method register; it is clobbered while the callee
// is looked up in the caller's dex-cache resolved-methods array.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // Branch-with-link through LR to the callee's quick entry point.
    __ blx(LR);
  } else {
    // Recursive call: branch-and-link straight to this method's own frame
    // entry, skipping the dex-cache lookup.
    __ bl(GetFrameEntryLabel());
  }

  // Record the PC after the call so stack walks can map it back to the invoke.
  RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!IsLeafMethod());
}
3867
// HBoundType only carries type information for earlier optimization passes;
// it is removed before code generation, so reaching this visitor is a bug.
void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3873
// HBoundType only carries type information for earlier optimization passes;
// it is removed before code generation, so reaching this visitor is a bug.
void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3879
3880}  // namespace arm
3881}  // namespace art
3882