code_generator_arm.cc revision f7a0c4e421b5edaad5b7a15bfff687da28d0b287
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static bool ExpectedPairLayout(Location location) {
41  // We expected this for both core and fpu register pairs.
42  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
43}
44
// Stack offset at which the current ArtMethod* lives; R0 is stored there
// on frame entry (see GenerateFrameEntry).
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers for calls into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-save registers. PC stands in for the return-address slot: frame
// entry pushes LR in its place (see GenerateFrameEntry) and frame exit
// pops straight into PC, mimicking Quick.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };
60
// Calling convention for calls into the runtime: arguments are passed in
// the kRuntimeParameter{Core,Fpu}Registers defined above.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
72
// Assembler shorthand for the slow paths below; `codegen` is the
// CodeGenerator* parameter of each EmitNativeCode.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Int32 offset of a quick entrypoint within the ARM Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
75
// Base class for ARM slow paths: bundles the entry label (branched to from
// fast-path code) with the exit label (branched back to when the slow path
// falls through to normal execution).
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  // Label bound at the start of the emitted slow-path code.
  Label* GetEntryLabel() { return &entry_label_; }
  // Label marking the resumption point after the slow path.
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
89
// Slow path for HNullCheck: calls the pThrowNullPointer entrypoint.
// No branch back to the exit label is emitted — the entrypoint is not
// expected to return.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
105
// Slow path for HDivZeroCheck: calls the pThrowDivZero entrypoint.
// No branch back to the exit label is emitted — the entrypoint is not
// expected to return.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
121
// Slow path for HSuspendCheck: spills live registers, calls pTestSuspend,
// restores them, then resumes either at `successor_` or at the return label.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Registers live at the check must survive the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      // Resume right after the point that branched here.
      __ b(GetReturnLabel());
    } else {
      // The check was placed on a back/forward edge: continue at the successor.
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
156
// Slow path for HBoundsCheck: passes the offending index and the array
// length to pThrowArrayBounds. No exit branch is emitted — the entrypoint
// is not expected to return.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
188
// Slow path resolving (and optionally initializing) a class through the
// runtime, used by both HLoadClass and HClinitCheck.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call: arg0 = type index, arg1 = current method.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not be among the saved live registers, or the
      // restore below would clobber the value we just moved.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
240
// Slow path resolving a string through the pResolveString entrypoint.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output must not be among the saved live registers, or the restore
    // below would clobber the result moved out of R0.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call: arg0 = string index, arg1 = current method.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    // Move the result out of the return register (R0, see GetReturnLocation).
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
269
// Slow path shared by HInstanceOf (calls pInstanceofNonTrivial and moves the
// boolean result into the output) and HCheckCast (calls pCheckCast, which is
// expected to throw on failure).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output must not be among the saved live registers,
    // or the restore below would clobber the value moved out of R0.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
319
// Redefine the assembler shorthand for the code generator methods below,
// which access the assembler via their own GetAssembler().
// (A second, redundant `#undef __` was removed here.)
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
324
325inline Condition ARMCondition(IfCondition cond) {
326  switch (cond) {
327    case kCondEQ: return EQ;
328    case kCondNE: return NE;
329    case kCondLT: return LT;
330    case kCondLE: return LE;
331    case kCondGT: return GT;
332    case kCondGE: return GE;
333    default:
334      LOG(FATAL) << "Unknown if condition";
335  }
336  return EQ;        // Unreachable.
337}
338
339inline Condition ARMOppositeCondition(IfCondition cond) {
340  switch (cond) {
341    case kCondEQ: return NE;
342    case kCondNE: return EQ;
343    case kCondLT: return GE;
344    case kCondLE: return GT;
345    case kCondGT: return LE;
346    case kCondGE: return LT;
347    default:
348      LOG(FATAL) << "Unknown if condition";
349  }
350  return EQ;        // Unreachable.
351}
352
353void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
354  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
355}
356
357void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
358  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
359}
360
361size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
362  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
363  return kArmWordSize;
364}
365
366size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
367  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
368  return kArmWordSize;
369}
370
371size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
372  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
373  return kArmWordSize;
374}
375
376size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
377  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
378  return kArmWordSize;
379}
380
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    // Callee-save masks computed from the tables at the top
                    // of this file.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  AddAllocatedRegister(Location::RegisterLocation(kCoreSavedRegisterForBaseline));
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
406
// Picks a free register (or register pair) appropriate for `type`, marks it
// blocked, and keeps the core-register and register-pair blocked sets
// consistent with each other.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; block both constituent registers
      // and any pair that overlaps them.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      // Floats use a single S register.
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting at an even index
      // (so the pair aliases a D register, see FromLowSToD).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
457
// Marks the registers the allocator may never hand out; with `is_baseline`
// the callee-saves are additionally blocked (except the one baseline core
// register reserved above).
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // Keep the dedicated baseline register (R5) available for long operations.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate the core blocks to the register-pair view.
  UpdateBlockedPairRegisters();
}
487
488void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
489  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
490    ArmManagedRegister current =
491        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
492    if (blocked_core_registers_[current.AsRegisterPairLow()]
493        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
494      blocked_register_pairs_[i] = true;
495    }
496  }
497}
498
// Instruction visitor that emits ARM code; shares the code generator's
// assembler so output is interleaved with the rest of the method.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
503
// Zero-based index of the lowest set bit of `mask`. Relies on ffs(), which
// numbers bits starting at 1 (callers pass a non-zero mask; for mask == 0
// the result wraps, matching the original ffs-based arithmetic).
static uint32_t LeastSignificantBit(uint32_t mask) {
  const int one_based_index = ffs(mask);
  return static_cast<uint32_t>(one_based_index) - 1u;
}
508
// Computes the core and FPU spill masks from the allocated callee-save
// registers; the FPU mask is widened to a contiguous range for vpush/vpop.
void CodeGeneratorARM::ComputeSpillMask() {
  // Spill exactly the allocated callee-save core registers.
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every gap between the lowest and highest spilled S register.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
525
526void CodeGeneratorARM::GenerateFrameEntry() {
527  bool skip_overflow_check =
528      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
529  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
530  __ Bind(&frame_entry_label_);
531
532  if (!skip_overflow_check) {
533    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
534    __ LoadFromOffset(kLoadWord, IP, IP, 0);
535    RecordPcInfo(nullptr, 0);
536  }
537
538  // PC is in the list of callee-save to mimic Quick, but we need to push
539  // LR at entry instead.
540  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
541  if (fpu_spill_mask_ != 0) {
542    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
543    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
544  }
545  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
546  __ StoreToOffset(kStoreWord, R0, SP, 0);
547}
548
// Emits the method epilogue, mirroring GenerateFrameEntry: reclaim the frame,
// restore the FPU callee-saves, then pop the core saves. core_spill_mask_
// contains PC (see kCoreCalleeSaves), so the pop also performs the return.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
557
// Binds `block`'s label to the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
561
// Returns the stack location of a local variable load: 64-bit types occupy a
// double stack slot, 32-bit types a single slot. Sub-word types are widened
// before this point and must not appear here.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}
585
// Assigns the location of the next method argument of type `type` under the
// managed ARM calling convention. Separate cursors are kept for core
// registers (gp_index_), floats (float_index_), doubles (double_index_) and
// the stack (stack_index_); stack_index_ advances for every argument, so
// stack space is always reserved even for register arguments.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs need two consecutive core registers (see the DCHECK below).
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        // No odd register was left over by alignment; catch the float cursor
        // up with the double cursor so floats don't land in registers
        // already taken by doubles.
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles take an even-aligned S register pair, never going back
      // before registers that floats have already consumed.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
660
// Returns where a method's return value of type `type` lives: R0 (or the
// R0/R1 pair) for integral/reference types, S0 (or the S0/S1 pair) for
// floating-point types, and no location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
690
// Moves a 32-bit value between any combination of core register, S register
// and stack slot. Stack-to-stack moves go through the scratch register IP.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
724
// Moves a 64-bit value between register pairs, FPU register pairs and double
// stack slots. Not all combinations are needed; the unused ones are left
// UNIMPLEMENTED.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The two pairs may overlap, so resolve the halves as parallel moves.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // A word-pair load needs the expected even/consecutive pair layout.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is the one odd-based pair; store its halves separately
        // instead of using a word-pair store.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move the two words as parallel moves (each bounces
      // through the scratch register in the resolver).
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
778
// Materializes `instruction`'s value into `location` for the consumer
// `move_for`: constants are loaded as immediates, local loads come from their
// stack slot, temporaries from their assigned location, and everything else
// from the instruction's output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Materialize via IP, then spill.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Spill the long one 32-bit half at a time through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // The producer must immediately precede its consumer (modulo temporaries)
    // for its output location to still be valid.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
862
// Emits a call into the runtime through the entrypoint table hanging off the
// thread register (TR), then records the PC so the runtime can map the return
// address back to `dex_pc` for stack walks and deoptimization.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  // LR = *(TR + entry_point_offset); call through LR.
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  // Must immediately follow the call so the recorded PC is the return address.
  RecordPcInfo(instruction, dex_pc);
  // Only instructions that are known to call the runtime (or a non-leaf
  // method) may reach here.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
876
// A goto consumes and produces no values, so it needs no location summary.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
880
// Generates an unconditional branch to the successor, folding in suspend
// checks at loop back edges and after the entry block, and eliding the branch
// entirely when the successor is laid out immediately after this block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge of a loop with a suspend check: emit the check here so the
    // branch to the header happens through the suspend-check slow path.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // Entry-block suspend check (method entry) is generated in-line, before
  // the branch below.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
902
// The exit block produces no code and needs no locations.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
906
// The exit block should never be reached at runtime (returns branch to the
// frame exit directly); in debug builds emit a breakpoint to catch fallthrough.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
914
915void LocationsBuilderARM::VisitIf(HIf* if_instr) {
916  LocationSummary* locations =
917      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
918  HInstruction* cond = if_instr->InputAt(0);
919  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
920    locations->SetInAt(0, Location::RequiresRegister());
921  }
922}
923
// Generates the conditional branch for an HIf. Three cases:
//  1. constant condition: branch (or fall through) statically;
//  2. materialized condition: compare the boolean against 0;
//  3. non-materialized condition: emit the compare here and branch on its
//     condition code directly.
// In all cases the false-successor branch is elided when it is the next block.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Use an immediate operand if the value can be encoded; otherwise
        // materialize it in IP first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Fall-through (or branch) to the false successor.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
975
976
// Shared location handling for all comparison instructions: left operand in a
// register, right operand register-or-constant, and an output register only
// when the boolean result must be materialized.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
986
// Materializes a comparison result as 0/1 in the output register. If the
// condition is fused into a branch instead, the compare is emitted by
// VisitIf and there is nothing to do here.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    // Encode the constant directly when possible; otherwise go through IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT block: out = 1 if the condition holds, else 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1012
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1016
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1020
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1024
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1028
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1032
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1036
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1040
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1044
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1048
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1052
// Delegates to the shared HCondition locations handling.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1056
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1060
// Locals generate no code; their storage is a fixed stack slot.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1064
// Nothing to emit; just check the invariant that locals live in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1068
// Loads from locals are handled at the use site (see CodeGeneratorARM::Move),
// so no location summary is needed.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1072
// Nothing to do, this is driven by the code generator (the move is emitted
// at the use site).
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  UNUSED(load);
}
1077
// Requests that the stored value (input 1) be placed directly into the
// local's stack slot, so the store itself generates no code.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      // 32-bit value: single stack slot.
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      // 64-bit value: two consecutive stack slots.
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1101
// Nothing to emit: the input was already constrained to the local's slot.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1105
1106void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1107  LocationSummary* locations =
1108      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1109  locations->SetOut(Location::ConstantLocation(constant));
1110}
1111
// Will be generated at use site; no code here.
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  UNUSED(constant);
}
1116
1117void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1118  LocationSummary* locations =
1119      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1120  locations->SetOut(Location::ConstantLocation(constant));
1121}
1122
// Will be generated at use site; no code here.
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  UNUSED(constant);
}
1127
1128void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1129  LocationSummary* locations =
1130      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1131  locations->SetOut(Location::ConstantLocation(constant));
1132}
1133
// Will be generated at use site; no code here.
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  UNUSED(constant);
}
1138
1139void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1140  LocationSummary* locations =
1141      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1142  locations->SetOut(Location::ConstantLocation(constant));
1143}
1144
// Will be generated at use site; no code here.
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  UNUSED(constant);
}
1149
// A void return takes no operands and needs no location summary.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1153
// Emits the method epilogue (frame teardown and branch to LR).
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1158
// Pins the returned value to the calling convention's return location
// (e.g. R0/R0-R1/S0) so the epilogue needs no extra move.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1164
// The value is already in the return register(s); just emit the epilogue.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1169
// Static/direct invokes use the common invoke location handling.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  HandleInvoke(invoke);
}
1173
// Loads the current ArtMethod* (spilled at SP + kCurrentMethodStackOffset by
// the prologue) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1177
// Emits a static/direct call: resolves the callee through the caller's
// dex-cache and calls its quick-compiled entrypoint, or branches straight to
// the frame entry label for a recursive self-call.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                             kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // Recursive call: skip the lookup and branch to our own entry.
    __ bl(codegen_->GetFrameEntryLabel());
  }

  // Record the return address for stack maps; calls are never leaf methods.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1210
1211void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1212  LocationSummary* locations =
1213      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1214  locations->AddTemp(Location::RegisterLocation(R0));
1215
1216  InvokeDexCallingConventionVisitor calling_convention_visitor;
1217  for (size_t i = 0; i < invoke->InputCount(); i++) {
1218    HInstruction* input = invoke->InputAt(i);
1219    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1220  }
1221
1222  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1223}
1224
// Virtual invokes use the common invoke location handling.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1228
// Emits a virtual call: load the receiver's class, index into its embedded
// vtable, and call the method's quick-compiled entrypoint. The class load
// doubles as the implicit null check on the receiver.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above faults on a null receiver; record it as the check.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1255
// Interface invokes use the common invoke handling plus R12 as a temp for
// the hidden argument (the interface method's dex index).
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1261
// Emits an interface call: pass the interface method index as a hidden
// argument, load the receiver's class, index into its embedded IMT, and call
// the resolved method's quick-compiled entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above faults on a null receiver; record it as the check.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1294
// Location handling for arithmetic negation, by result type. The long case
// requires output overlap because the high output word is clobbered before
// the high input word is read (see the codegen below).
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1320
// Emits arithmetic negation. Int uses RSB; long uses an RSBS/SBC/SUB sequence
// (RSC is unavailable in Thumb-2); float/double use VFP negate instructions.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1367
// Location handling for primitive type conversions, keyed on (result type,
// input type). Most conversions are emitted inline; float/double-to-long go
// through a runtime call and therefore use the runtime calling convention
// with the result in R0:R1.
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // The FPU temp holds the intermediate truncated value.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          // Runtime call: input in the convention's first FPU register,
          // result in R0:R1.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          // Runtime call: input in the convention's first FPU register pair,
          // result in R0:R1.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // Several temps are needed for the in-line two-word conversion.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1566
// Generates code for an HTypeConversion.  Integral narrowing uses bit-field
// extract instructions; int <-> FP conversions use VFP instructions through
// scratch S/D registers; float-to-long and double-to-long are delegated to
// the pF2l / pD2l runtime entry points; long-to-float/double are computed
// in double precision (see the comments in the respective cases).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // SBFX sign-extends the low 8 bits into the destination.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // SBFX sign-extends the low 16 bits into the destination.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            // Load only the low word of the spilled pair.
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FP scratch register, then move the result to core.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // Convert in an FP scratch register, then move the result to core.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension: replicate the sign bit into the high word.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // Delegated to the pF2l runtime entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          // Delegated to the pD2l runtime entry point.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // UBFX zero-extends the low 16 bits (char is unsigned).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          // Move to an S register, then convert in place.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          // Move to the low S register of the output pair, convert in place.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same scheme as long-to-float above, but the result stays double,
          // so the output register itself is used as one of the accumulators.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1826
1827void LocationsBuilderARM::VisitAdd(HAdd* add) {
1828  LocationSummary* locations =
1829      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1830  switch (add->GetResultType()) {
1831    case Primitive::kPrimInt: {
1832      locations->SetInAt(0, Location::RequiresRegister());
1833      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1834      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1835      break;
1836    }
1837
1838    case Primitive::kPrimLong: {
1839      locations->SetInAt(0, Location::RequiresRegister());
1840      locations->SetInAt(1, Location::RequiresRegister());
1841      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1842      break;
1843    }
1844
1845    case Primitive::kPrimFloat:
1846    case Primitive::kPrimDouble: {
1847      locations->SetInAt(0, Location::RequiresFpuRegister());
1848      locations->SetInAt(1, Location::RequiresFpuRegister());
1849      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1850      break;
1851    }
1852
1853    default:
1854      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1855  }
1856}
1857
// Generates code for an HAdd.  Long addition is a carry chain: ADDS on the
// low words sets the carry flag, which ADC folds into the high words.
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Second operand is a constant (see the locations builder).
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // ADDS sets the carry flag from the low-word addition ...
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      // ... and ADC consumes it for the high words.
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat:
      __ vadds(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1903
1904void LocationsBuilderARM::VisitSub(HSub* sub) {
1905  LocationSummary* locations =
1906      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1907  switch (sub->GetResultType()) {
1908    case Primitive::kPrimInt: {
1909      locations->SetInAt(0, Location::RequiresRegister());
1910      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1911      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1912      break;
1913    }
1914
1915    case Primitive::kPrimLong: {
1916      locations->SetInAt(0, Location::RequiresRegister());
1917      locations->SetInAt(1, Location::RequiresRegister());
1918      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1919      break;
1920    }
1921    case Primitive::kPrimFloat:
1922    case Primitive::kPrimDouble: {
1923      locations->SetInAt(0, Location::RequiresFpuRegister());
1924      locations->SetInAt(1, Location::RequiresFpuRegister());
1925      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1926      break;
1927    }
1928    default:
1929      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1930  }
1931}
1932
// Generates code for an HSub.  Long subtraction is a borrow chain: SUBS on
// the low words sets the flags, which SBC consumes for the high words.  An
// int subtraction by a constant is emitted as an addition of the negated
// constant.
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // NOTE(review): negating the constant relies on two's-complement
        // wrap-around when the constant is INT32_MIN — confirm this matches
        // the intended Dex semantics.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK(second.IsRegisterPair());
      // SUBS sets the borrow (carry) flag from the low-word subtraction ...
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      // ... and SBC consumes it for the high words.
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1982
1983void LocationsBuilderARM::VisitMul(HMul* mul) {
1984  LocationSummary* locations =
1985      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1986  switch (mul->GetResultType()) {
1987    case Primitive::kPrimInt:
1988    case Primitive::kPrimLong:  {
1989      locations->SetInAt(0, Location::RequiresRegister());
1990      locations->SetInAt(1, Location::RequiresRegister());
1991      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1992      break;
1993    }
1994
1995    case Primitive::kPrimFloat:
1996    case Primitive::kPrimDouble: {
1997      locations->SetInAt(0, Location::RequiresFpuRegister());
1998      locations->SetInAt(1, Location::RequiresFpuRegister());
1999      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2000      break;
2001    }
2002
2003    default:
2004      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2005  }
2006}
2007
// Generates code for an HMul.  The 64-bit case is synthesized from 32-bit
// multiplies (MUL/MLA/UMULL) with IP as scratch; see the inline formula.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // UMULL also leaves the high half of the product in IP.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2069
2070void LocationsBuilderARM::VisitDiv(HDiv* div) {
2071  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2072      ? LocationSummary::kCall
2073      : LocationSummary::kNoCall;
2074  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2075
2076  switch (div->GetResultType()) {
2077    case Primitive::kPrimInt: {
2078      locations->SetInAt(0, Location::RequiresRegister());
2079      locations->SetInAt(1, Location::RequiresRegister());
2080      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2081      break;
2082    }
2083    case Primitive::kPrimLong: {
2084      InvokeRuntimeCallingConvention calling_convention;
2085      locations->SetInAt(0, Location::RegisterPairLocation(
2086          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2087      locations->SetInAt(1, Location::RegisterPairLocation(
2088          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2089      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2090      break;
2091    }
2092    case Primitive::kPrimFloat:
2093    case Primitive::kPrimDouble: {
2094      locations->SetInAt(0, Location::RequiresFpuRegister());
2095      locations->SetInAt(1, Location::RequiresFpuRegister());
2096      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2097      break;
2098    }
2099
2100    default:
2101      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2102  }
2103}
2104
// Generates code for an HDiv.  Int division uses SDIV, long division goes
// through the pLdiv runtime entry point, and float/double use VFP vdiv.
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      __ sdiv(out.AsRegister<Register>(),
              first.AsRegister<Register>(),
              second.AsRegister<Register>());
      break;
    }

    case Primitive::kPrimLong: {
      // The locations builder pinned operands and result to the runtime
      // calling convention; these checks just verify that contract.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2150
2151void LocationsBuilderARM::VisitRem(HRem* rem) {
2152  Primitive::Type type = rem->GetResultType();
2153  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2154      ? LocationSummary::kNoCall
2155      : LocationSummary::kCall;
2156  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2157
2158  switch (type) {
2159    case Primitive::kPrimInt: {
2160      locations->SetInAt(0, Location::RequiresRegister());
2161      locations->SetInAt(1, Location::RequiresRegister());
2162      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2163      locations->AddTemp(Location::RequiresRegister());
2164      break;
2165    }
2166    case Primitive::kPrimLong: {
2167      InvokeRuntimeCallingConvention calling_convention;
2168      locations->SetInAt(0, Location::RegisterPairLocation(
2169          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2170      locations->SetInAt(1, Location::RegisterPairLocation(
2171          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2172      // The runtime helper puts the output in R2,R3.
2173      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2174      break;
2175    }
2176    case Primitive::kPrimFloat: {
2177      InvokeRuntimeCallingConvention calling_convention;
2178      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2179      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2180      locations->SetOut(Location::FpuRegisterLocation(S0));
2181      break;
2182    }
2183
2184    case Primitive::kPrimDouble: {
2185      InvokeRuntimeCallingConvention calling_convention;
2186      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2187          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2188      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2189          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2190      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2191      break;
2192    }
2193
2194    default:
2195      LOG(FATAL) << "Unexpected rem type " << type;
2196  }
2197}
2198
// Generates code for an HRem.  The int remainder is computed inline as
// reg1 - (reg1 / reg2) * reg2; long, float and double remainders call the
// pLmod / pFmodf / pFmod runtime entry points (operands were pinned to the
// runtime calling convention by the locations builder).
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register reg1 = first.AsRegister<Register>();
      Register reg2 = second.AsRegister<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();

      // temp = reg1 / reg2  (integer division)
      // temp = temp * reg2
      // dest = reg1 - temp
      __ sdiv(temp, reg1, reg2);
      __ mul(temp, temp, reg2);
      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      break;
    }

    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
2240
2241void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2242  LocationSummary* locations =
2243      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2244  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2245  if (instruction->HasUses()) {
2246    locations->SetOut(Location::SameAsFirstInput());
2247  }
2248}
2249
2250void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2251  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2252  codegen_->AddSlowPath(slow_path);
2253
2254  LocationSummary* locations = instruction->GetLocations();
2255  Location value = locations->InAt(0);
2256
2257  switch (instruction->GetType()) {
2258    case Primitive::kPrimInt: {
2259      if (value.IsRegister()) {
2260        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2261        __ b(slow_path->GetEntryLabel(), EQ);
2262      } else {
2263        DCHECK(value.IsConstant()) << value;
2264        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2265          __ b(slow_path->GetEntryLabel());
2266        }
2267      }
2268      break;
2269    }
2270    case Primitive::kPrimLong: {
2271      if (value.IsRegisterPair()) {
2272        __ orrs(IP,
2273                value.AsRegisterPairLow<Register>(),
2274                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2275        __ b(slow_path->GetEntryLabel(), EQ);
2276      } else {
2277        DCHECK(value.IsConstant()) << value;
2278        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2279          __ b(slow_path->GetEntryLabel());
2280        }
2281      }
2282      break;
2283    default:
2284      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2285    }
2286  }
2287}
2288
2289void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2290  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2291
2292  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2293      ? LocationSummary::kCall
2294      : LocationSummary::kNoCall;
2295  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2296
2297  switch (op->GetResultType()) {
2298    case Primitive::kPrimInt: {
2299      locations->SetInAt(0, Location::RequiresRegister());
2300      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2301      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2302      break;
2303    }
2304    case Primitive::kPrimLong: {
2305      InvokeRuntimeCallingConvention calling_convention;
2306      locations->SetInAt(0, Location::RegisterPairLocation(
2307          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2308      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2309      // The runtime helper puts the output in R0,R1.
2310      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2311      break;
2312    }
2313    default:
2314      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2315  }
2316}
2317
// Generates code shared by Shl/Shr/UShr.  32-bit shifts mask the count to
// [0, 31] (Dex semantics) and emit LSL/ASR/LSR; 64-bit shifts call the
// pShlLong/pShrLong/pUshrLong runtime helpers directly via blx.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): this masks the count in place and thus clobbers the
        // `second` input register — confirm no later user of that value
        // relies on it surviving this instruction.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant count: mask at compile time.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // The locations builder pinned operands/result to the runtime calling
      // convention; these checks verify that contract.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // NOTE(review): unlike InvokeRuntime, this raw blx does not record pc
      // info — confirm these leaf entry points cannot trigger a stack walk.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2382
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // All shift kinds (shl/shr/ushr) share the same location constraints.
  HandleShift(shl);
}
2386
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation is shared across shl/shr/ushr; HandleShift dispatches
  // on the instruction kind.
  HandleShift(shl);
}
2390
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // All shift kinds (shl/shr/ushr) share the same location constraints.
  HandleShift(shr);
}
2394
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation is shared across shl/shr/ushr; HandleShift dispatches
  // on the instruction kind.
  HandleShift(shr);
}
2398
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // All shift kinds (shl/shr/ushr) share the same location constraints.
  HandleShift(ushr);
}
2402
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation is shared across shl/shr/ushr; HandleShift dispatches
  // on the instruction kind.
  HandleShift(ushr);
}
2406
2407void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2408  LocationSummary* locations =
2409      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2410  InvokeRuntimeCallingConvention calling_convention;
2411  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2412  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2413  locations->SetOut(Location::RegisterLocation(R0));
2414}
2415
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Pass the current method in the second argument register and the type
  // index in the first, then call the allocation entrypoint.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2424
2425void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2426  LocationSummary* locations =
2427      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2428  InvokeRuntimeCallingConvention calling_convention;
2429  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2430  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2431  locations->SetOut(Location::RegisterLocation(R0));
2432  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2433}
2434
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Pass the current method in the third argument register and the type
  // index in the first; the length was placed in the second argument
  // register by the location builder.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2443
2444void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2445  LocationSummary* locations =
2446      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2447  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2448  if (location.IsStackSlot()) {
2449    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2450  } else if (location.IsDoubleStackSlot()) {
2451    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2452  }
2453  locations->SetOut(location);
2454}
2455
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // The location builder already mapped the parameter to its incoming
  // register or stack slot.
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2460
2461void LocationsBuilderARM::VisitNot(HNot* not_) {
2462  LocationSummary* locations =
2463      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2464  locations->SetInAt(0, Location::RequiresRegister());
2465  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2466}
2467
2468void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2469  LocationSummary* locations = not_->GetLocations();
2470  Location out = locations->Out();
2471  Location in = locations->InAt(0);
2472  switch (not_->InputAt(0)->GetType()) {
2473    case Primitive::kPrimInt:
2474      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2475      break;
2476
2477    case Primitive::kPrimLong:
2478      __ mvn(out.AsRegisterPairLow<Register>(),
2479             ShifterOperand(in.AsRegisterPairLow<Register>()));
2480      __ mvn(out.AsRegisterPairHigh<Register>(),
2481             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2482      break;
2483
2484    default:
2485      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2486  }
2487}
2488
2489void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2490  LocationSummary* locations =
2491      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2492  switch (compare->InputAt(0)->GetType()) {
2493    case Primitive::kPrimLong: {
2494      locations->SetInAt(0, Location::RequiresRegister());
2495      locations->SetInAt(1, Location::RequiresRegister());
2496      // Output overlaps because it is written before doing the low comparison.
2497      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2498      break;
2499    }
2500    case Primitive::kPrimFloat:
2501    case Primitive::kPrimDouble: {
2502      locations->SetInAt(0, Location::RequiresFpuRegister());
2503      locations->SetInAt(1, Location::RequiresFpuRegister());
2504      locations->SetOut(Location::RequiresRegister());
2505      break;
2506    }
2507    default:
2508      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2509  }
2510}
2511
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  // Materializes a three-way comparison result (-1, 0 or 1) in a core
  // register, for long, float and double inputs.
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first (signed); only when they are equal do
      // the flags of the unsigned low-word compare below decide the result.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN operands: gt-bias compares treat unordered as greater, others as less.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Here `out` is already 0 and the flags reflect the deciding comparison.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2560
2561void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2562  LocationSummary* locations =
2563      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2564  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2565    locations->SetInAt(i, Location::Any());
2566  }
2567  locations->SetOut(Location::Any());
2568}
2569
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // No code is ever generated for a phi; reaching this visitor is a
  // compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2574
2575void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2576  // TODO (ported from quick): revisit Arm barrier kinds
2577  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2578  switch (kind) {
2579    case MemBarrierKind::kAnyStore:
2580    case MemBarrierKind::kLoadAny:
2581    case MemBarrierKind::kAnyAny: {
2582      flavour = DmbOptions::ISH;
2583      break;
2584    }
2585    case MemBarrierKind::kStoreStore: {
2586      flavour = DmbOptions::ISHST;
2587      break;
2588    }
2589    default:
2590      LOG(FATAL) << "Unexpected memory barrier " << kind;
2591  }
2592  __ dmb(flavour);
2593}
2594
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  // Atomically loads a 64-bit value from addr + offset into out_lo/out_hi
  // using ldrexd, which takes the full address in a single register.
  if (offset != 0) {
    // Materialize addr + offset in IP, using out_lo as a scratch register
    // for the offset (it is about to be overwritten by the load anyway).
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2606
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  // Atomically stores value_lo/value_hi to addr + offset with an
  // ldrexd/strexd retry loop; temp1 and temp2 are clobbered.
  Label fail;
  if (offset != 0) {
    // Compute the full address in IP; temp1 serves as scratch for the offset.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 into temp1 on success; retry until the exclusive store succeeds.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2629
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  // Sets up locations for instance and static field stores. The temp
  // allocation order here must match the GetTemp() indices used by the
  // code generator's HandleFieldSet.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  // Wide volatile stores need ldrexd/strexd emulation when the CPU lacks
  // single-copy-atomic 64-bit ldrd/strd.
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2666
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Emits the store for instance and static field writes. Volatile stores
  // are bracketed by any-store / any-any barriers, and wide volatile
  // stores fall back to an ldrexd/strexd loop when the CPU lacks atomic
  // 64-bit strd.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the double into two core registers so it can go through the
        // ldrexd/strexd sequence.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object holding the updated reference.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2764
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  // Sets up locations for instance and static field loads.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // Volatile doubles need the ldrexd emulation (and hence core temps) when
  // the CPU lacks single-copy-atomic 64-bit ldrd.
  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Volatile long outputs must not alias the base register input.
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2788
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Emits the load for instance and static field reads. Volatile reads are
  // followed by a load-any barrier; wide volatile loads use ldrexd when the
  // CPU lacks single-copy-atomic 64-bit ldrd.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into two core temps, then move into the D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2873
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Location setup is shared with static field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2877
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Code generation is shared with static field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2881
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Location setup is shared with static field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2885
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Code generation is shared with static field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2889
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Location setup is shared with instance field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2893
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Code generation is shared with instance field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2897
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Location setup is shared with instance field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2901
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Code generation is shared with instance field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2905
2906void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2907  LocationSummary* locations =
2908      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2909  locations->SetInAt(0, Location::RequiresRegister());
2910  if (instruction->HasUses()) {
2911    locations->SetOut(Location::SameAsFirstInput());
2912  }
2913}
2914
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a later instruction can absorb the null check, emit nothing here.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Emit a load that faults when the reference is null, and record the pc
  // so the fault can be attributed to this instruction.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2924
2925void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
2926  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2927  codegen_->AddSlowPath(slow_path);
2928
2929  LocationSummary* locations = instruction->GetLocations();
2930  Location obj = locations->InAt(0);
2931
2932  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2933  __ b(slow_path->GetEntryLabel(), EQ);
2934}
2935
2936void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2937  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2938    GenerateImplicitNullCheck(instruction);
2939  } else {
2940    GenerateExplicitNullCheck(instruction);
2941  }
2942}
2943
2944void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2945  LocationSummary* locations =
2946      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2947  locations->SetInAt(0, Location::RequiresRegister());
2948  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2949  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2950}
2951
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  // Emits the load for an array element. For each element type: a constant
  // index is folded into the immediate offset, otherwise the scaled index
  // is added to the array base in IP and the load uses the data offset.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no scaling needed on the index.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References and ints share the word-sized load path.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3078
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  // Sets up locations for an array element store. Stores that need a type
  // check go through the runtime; others are emitted inline, with two
  // extra temps when a GC write barrier is required.
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    // Pin array, index and value to the runtime's argument registers.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
3105
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  // Generates `obj[index] = value`. Primitive stores are emitted inline with
  // an element-size-scaled address; object stores needing a type check call
  // the pAputObject runtime entrypoint instead. IP is used as the address
  // scratch register for the non-constant-index cases.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit elements, scale factor 1.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit elements, scale factor 2.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        // 32-bit elements, scale factor 4. References also flow through here
        // when no dynamic type check is required.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Recorded here, right after the store: the implicit null check for
        // this case must not wait for the write barrier below.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Reference stored: dirty the card covering `obj` for the GC.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Type-checked reference store: delegate entirely to the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit elements, scale factor 8; stored as a word pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // The low S register of the pair maps onto a D register (FromLowSToD).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3229
void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
  // A length load is a single word load; the output may reuse the input
  // register (no overlap required).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
3236
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  // Loads the 32-bit length field of the array object into `out`.
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  // Record the load just emitted as the (possible) implicit null check.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3245
void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  // Index and length both need registers for the single cmp emitted by the
  // instruction code generator.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    // Users of the check consume the (validated) index, i.e. the first input.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
3255
3256void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3257  LocationSummary* locations = instruction->GetLocations();
3258  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3259      instruction, locations->InAt(0), locations->InAt(1));
3260  codegen_->AddSlowPath(slow_path);
3261
3262  Register index = locations->InAt(0).AsRegister<Register>();
3263  Register length = locations->InAt(1).AsRegister<Register>();
3264
3265  __ cmp(index, ShifterOperand(length));
3266  __ b(slow_path->GetEntryLabel(), CS);
3267}
3268
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  // Marks the card-table entry covering `object`, but only when `value` (the
  // reference just stored into it) is non-null. `temp` and `card` are
  // clobbered.
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // `card` = card-table base, read from the current thread (TR).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store at base + (object >> kCardShift). The byte stored is the low byte
  // of `card` (i.e. of the table base itself) — the usual card-dirtying
  // trick, which avoids materializing a separate marker constant.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3277
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no locations of their own.
  temp->SetLocations(nullptr);
}
3281
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3286
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves are introduced after register allocation; they never
  // reach the locations builder.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3291
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which orders and emits the moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3295
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No inputs/outputs; may branch to a runtime slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3299
3300void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3301  HBasicBlock* block = instruction->GetBlock();
3302  if (block->GetLoopInformation() != nullptr) {
3303    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3304    // The back edge will generate the suspend check.
3305    return;
3306  }
3307  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3308    // The goto will generate the suspend check.
3309    return;
3310  }
3311  GenerateSuspendCheck(instruction, nullptr);
3312}
3313
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  // Polls the current thread's flags halfword; any set flag diverts to a
  // slow path that handles the suspend request. `successor` is non-null for
  // back-edge checks, where the fast path continues at the loop header.
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Stand-alone check: take the slow path only when a flag is set, and
    // resume right here afterwards.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: branch to `successor` when no flag is set, otherwise
    // fall into the slow path.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3332
// Returns the assembler shared with the owning code generator.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3336
void ParallelMoveResolverARM::EmitMove(size_t index) {
  // Emits one resolved parallel move: copies `source` into `destination`,
  // handling every supported combination of core/FP registers, register
  // pairs, (double) stack slots and constants. IP serves as the scratch
  // register for memory-to-memory transfers.
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register -> core register or 32-bit stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot -> core register, S register, or another slot
    // (bounced through IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // S register -> S register or 32-bit stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot -> another slot (two word bounces through IP), a
    // core register pair, or a D register.
    if (destination.IsDoubleStackSlot()) {
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (destination.IsRegisterPair()) {
      // kLoadWordPair needs an even/odd adjacent pair (checked below).
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair -> pair or 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // D register (as an S pair) -> D register or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant -> register(s) or stack; memory destinations are
    // materialized through IP.
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Store the raw bit pattern word by word.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3453
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  // Swaps a core register with a 32-bit stack slot, using IP as scratch.
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3459
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  // Swaps two 32-bit stack slots. IP holds one value; a second core scratch
  // register holds the other. If no free register exists, the scope pushes
  // one, which shifts all SP-relative offsets by one word — hence
  // `stack_offset`.
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3470
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  // Emits a swap between `source` and `destination` — needed when the
  // parallel move graph contains a cycle. IP is the core scratch; 64-bit
  // swaps additionally borrow a D register, preserved with vpush/vpop.
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Three-mov swap through IP (which must not be either operand).
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // S-register swap, bouncing one value through IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Pair swap as two independent word swaps through IP.
    __ Mov(IP, source.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(destination.AsRegisterPairLow<Register>(), IP);
    __ Mov(IP, source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ Mov(destination.AsRegisterPairHigh<Register>(), IP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> 64-bit stack slot, using a D register as 64-bit scratch.
    // TODO: Find a D register available in the parallel moves,
    // or reserve globally a D register.
    DRegister tmp = D0;
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    // Make room for the pushed DRegister.
    mem += 8;
    __ vpushd(tmp, 1);
    // Pair -> tmp, slot -> pair, tmp -> slot.
    __ vmovdrr(tmp, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(tmp, SP, mem);
    __ vpopd(tmp, 1);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // D-register swap via a third D register not equal to either operand.
    // TODO: Find a D register available in the parallel moves,
    // or reserve globally a D register.
    DRegister tmp = D0;
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    while (tmp == first || tmp == second) {
      tmp = static_cast<DRegister>(tmp + 1);
    }
    __ vpushd(tmp, 1);
    __ vmovd(tmp, first);
    __ vmovd(first, second);
    __ vmovd(second, tmp);
    __ vpopd(tmp, 1);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // D register <-> 64-bit stack slot.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    // TODO: Find or reserve a D register.
    DRegister tmp = reg == D0 ? D1 : D0;
    // Make room for the pushed DRegister.
    mem += 8;
    __ vpushd(tmp, 1);
    __ vmovd(tmp, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(tmp, SP, mem);
    __ vpopd(tmp, 1);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // S register <-> 32-bit stack slot, through IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot swap = two independent 32-bit slot swaps.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3564
// Spills a scratch core register on the stack around a swap.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3568
// Restores a scratch core register spilled by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3572
3573void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3574  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3575      ? LocationSummary::kCallOnSlowPath
3576      : LocationSummary::kNoCall;
3577  LocationSummary* locations =
3578      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3579  locations->SetOut(Location::RequiresRegister());
3580}
3581
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  // Materializes a class reference into `out`.
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // Fast case: the class being loaded is the current method's declaring
    // class, read directly off the ArtMethod.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // General case: read the dex-cache entry for the type index. A null
    // entry means the type is unresolved, so the slow path must call the
    // runtime.
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // Reuses the same slow path to also run the class initializer when
      // needed; the check binds the exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3608
void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
  // The check may call the runtime (via a slow path) to initialize the class.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    // Users consume the class that was checked, i.e. the first input.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
3617
3618void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3619  // We assume the class is not null.
3620  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3621      check->GetLoadClass(), check, check->GetDexPc(), true);
3622  codegen_->AddSlowPath(slow_path);
3623  GenerateClassInitializationCheck(slow_path,
3624                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3625}
3626
// Branches to `slow_path` unless the class in `class_reg` has reached the
// initialized status; status values below kStatusInitialized take the slow
// path (signed compare). Binds the slow path's exit label on return.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3637
void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
  // May call the runtime (via a slow path) when the string is unresolved.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}
3643
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  // Loads a String reference out of the declaring class's dex cache:
  // current method -> declaring class -> string cache -> entry. A null
  // entry means the string is not resolved yet and the slow path must call
  // the runtime.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3657
void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  // Reads thread-local state only; no runtime call involved.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
3663
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  // Reads the pending exception off the current thread (TR), then clears the
  // thread-local exception slot by storing zero back into it.
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3671
void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  // Always calls the runtime; the exception object goes in the first
  // runtime-calling-convention register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3678
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  // Throwing is delegated entirely to the pDeliverException entrypoint.
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3683
3684void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3685  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3686      ? LocationSummary::kNoCall
3687      : LocationSummary::kCallOnSlowPath;
3688  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3689  locations->SetInAt(0, Location::RequiresRegister());
3690  locations->SetInAt(1, Location::RequiresRegister());
3691  // The out register is used as a temporary, so it overlaps with the inputs.
3692  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3693}
3694
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  // Computes `obj instanceof cls` into `out` (0 or 1). A null `obj` yields 0.
  // `out` doubles as a temporary holding obj's class during the compare.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Fall-through target for the "not an instance" cases.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    // The slow path rejoins here with `out` already set.
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3733
void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
  // May divert to a runtime slow path when the fast class compare fails.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary holding the object's class during the compare.
  locations->AddTemp(Location::RequiresRegister());
}
3741
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  // Verifies that `obj` is null or an instance of `cls`; the slow path
  // handles the failing exact-class compare (and, if the check ultimately
  // fails, the exception).
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null object trivially passes the cast: jump straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3762
void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Always calls the runtime; the monitored object goes in the first
  // runtime-calling-convention register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3769
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  // monitorenter/monitorexit are delegated entirely to the runtime
  // entrypoints, picked by the operation's direction.
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}
3776
// And/Or/Xor share a single location-building routine.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3780
3781void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3782  LocationSummary* locations =
3783      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3784  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3785         || instruction->GetResultType() == Primitive::kPrimLong);
3786  locations->SetInAt(0, Location::RequiresRegister());
3787  locations->SetInAt(1, Location::RequiresRegister());
3788  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3789}
3790
3791void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3792  HandleBitwiseOperation(instruction);
3793}
3794
3795void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3796  HandleBitwiseOperation(instruction);
3797}
3798
3799void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3800  HandleBitwiseOperation(instruction);
3801}
3802
3803void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3804  LocationSummary* locations = instruction->GetLocations();
3805
3806  if (instruction->GetResultType() == Primitive::kPrimInt) {
3807    Register first = locations->InAt(0).AsRegister<Register>();
3808    Register second = locations->InAt(1).AsRegister<Register>();
3809    Register out = locations->Out().AsRegister<Register>();
3810    if (instruction->IsAnd()) {
3811      __ and_(out, first, ShifterOperand(second));
3812    } else if (instruction->IsOr()) {
3813      __ orr(out, first, ShifterOperand(second));
3814    } else {
3815      DCHECK(instruction->IsXor());
3816      __ eor(out, first, ShifterOperand(second));
3817    }
3818  } else {
3819    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3820    Location first = locations->InAt(0);
3821    Location second = locations->InAt(1);
3822    Location out = locations->Out();
3823    if (instruction->IsAnd()) {
3824      __ and_(out.AsRegisterPairLow<Register>(),
3825              first.AsRegisterPairLow<Register>(),
3826              ShifterOperand(second.AsRegisterPairLow<Register>()));
3827      __ and_(out.AsRegisterPairHigh<Register>(),
3828              first.AsRegisterPairHigh<Register>(),
3829              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3830    } else if (instruction->IsOr()) {
3831      __ orr(out.AsRegisterPairLow<Register>(),
3832             first.AsRegisterPairLow<Register>(),
3833             ShifterOperand(second.AsRegisterPairLow<Register>()));
3834      __ orr(out.AsRegisterPairHigh<Register>(),
3835             first.AsRegisterPairHigh<Register>(),
3836             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3837    } else {
3838      DCHECK(instruction->IsXor());
3839      __ eor(out.AsRegisterPairLow<Register>(),
3840             first.AsRegisterPairLow<Register>(),
3841             ShifterOperand(second.AsRegisterPairLow<Register>()));
3842      __ eor(out.AsRegisterPairHigh<Register>(),
3843             first.AsRegisterPairHigh<Register>(),
3844             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3845    }
3846  }
3847}
3848
3849}  // namespace arm
3850}  // namespace art
3851