code_generator_arm.cc revision 68e15009173f92fe717546a621b56413d5e9fba1
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// The current ArtMethod* is spilled at SP + 0 on frame entry (see
// GenerateFrameEntry, which stores R0 at offset 0).
static constexpr int kCurrentMethodStackOffset = 0;

// Core and FPU registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Note: PC is listed as a callee-save to mimic Quick's frame layout; frame
// entry actually pushes LR in its place (see GenerateFrameEntry).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
61
// Calling convention used when calling into runtime entry points
// (arguments in R0-R3 / S0-S3, per the register arrays above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
73
74#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
75#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
76
// Slow path for an explicit null check: calls the pThrowNullPointer runtime
// entry point, which throws. No branch back to the fast path is emitted and
// no live registers are saved — the exception unwinds instead of returning.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
92
// Slow path for a division-by-zero check: calls the pThrowDivZero runtime
// entry point, which throws. Like the null-check path, it never returns to
// the fast path, so no register save/restore is needed.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
108
// Slow path for a suspend check: saves live registers, calls pTestSuspend,
// restores the registers, then resumes execution. The resume target is
// either `successor_` (a known follow-up block) or, when `successor_` is
// null, a local return label the fast path binds after the check.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call may suspend the thread; all live values must survive.
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when there is no explicit successor block.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
143
// Slow path for an array bounds check: moves the failing index and the array
// length into the runtime-call argument registers, then calls
// pThrowArrayBounds, which throws (so there is no branch back).
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
  }

 private:
  HBoundsCheck* const instruction_;
  // Where the fast path left the index and array length at the check site.
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
175
// Slow path resolving (and optionally initializing) a class: calls either
// pInitializeStaticStorage or pInitializeType with the type index and current
// method, moves the resolved class from R0 to the expected output location,
// and jumps back to the fast path.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime call arguments: R0 = type index, R1 = current method.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Pick the entry point: initialize static storage (which also resolves)
    // when a clinit is required, otherwise just resolve the type.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output register must not be clobbered by the register restore.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
227
// Slow path resolving a string literal: calls pResolveString with the string
// index and current method, moves the result from R0 to the output location,
// and jumps back to the fast path.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must survive RestoreLiveRegisters below.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Runtime call arguments: R0 = string index, R1 = current method.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
256
// Slow path shared by instance-of and check-cast: moves the class to check
// against and the object's class into the runtime argument registers, then
// calls pInstanceofNonTrivial (producing a boolean result in R0) or
// pCheckCast (which throws on failure).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // CheckCast has no output; for InstanceOf the output register must
    // survive RestoreLiveRegisters below.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
      // The runtime returns the instance-of result in R0.
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
    }

    RestoreLiveRegisters(codegen, locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
307
308#undef __
309
310#undef __
311#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
312
313inline Condition ARMCondition(IfCondition cond) {
314  switch (cond) {
315    case kCondEQ: return EQ;
316    case kCondNE: return NE;
317    case kCondLT: return LT;
318    case kCondLE: return LE;
319    case kCondGT: return GT;
320    case kCondGE: return GE;
321    default:
322      LOG(FATAL) << "Unknown if condition";
323  }
324  return EQ;        // Unreachable.
325}
326
327inline Condition ARMOppositeCondition(IfCondition cond) {
328  switch (cond) {
329    case kCondEQ: return NE;
330    case kCondNE: return EQ;
331    case kCondLT: return GE;
332    case kCondLE: return GT;
333    case kCondGT: return LE;
334    case kCondGE: return LT;
335    default:
336      LOG(FATAL) << "Unknown if condition";
337  }
338  return EQ;        // Unreachable.
339}
340
341void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
342  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
343}
344
345void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
346  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
347}
348
// Spills core register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes used (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
353
// Reloads core register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
358
// Spills S register `reg_id` to the stack slot at SP + `stack_index`.
// Returns the number of bytes used (one ARM word).
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
363
// Reloads S register `reg_id` from the stack slot at SP + `stack_index`.
// Returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
368
// Constructs the ARM code generator, registering the register counts and the
// callee-save masks with the base CodeGenerator.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
390
// Allocates a free register (or register pair) suitable for `type`, updating
// the blocked-register bookkeeping so single registers and pairs stay
// mutually consistent.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs take a core register pair; mark both halves as blocked and
      // refresh the pair table.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting on an even index
      // (so they form a valid D register).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
441
// Marks registers the allocator must never hand out: reserved machine
// registers always, plus (for baseline) all callee-saves except the one
// register baseline keeps for long operations.
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // Keep R5 available so baseline can still form a core register pair.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate single-register blocking to the pair table.
  UpdateBlockedPairRegisters();
}
471
472void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
473  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
474    ArmManagedRegister current =
475        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
476    if (blocked_core_registers_[current.AsRegisterPairLow()]
477        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
478      blocked_register_pairs_[i] = true;
479    }
480  }
481}
482
// Instruction visitor that emits ARM code, sharing the code generator's
// assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
487
// Returns the zero-based index of the lowest set bit in `mask`.
// Callers must pass a non-zero mask (ffs() yields 0 for an empty mask,
// which would wrap around here).
static uint32_t LeastSignificantBit(uint32_t mask) {
  // ffs() numbers bits starting at 1; shift to a zero-based index.
  const int first_set = ffs(mask);
  return static_cast<uint32_t>(first_set) - 1u;
}
492
// Computes the core and FPU spill masks from the registers actually
// allocated, with two ARM-specific adjustments documented inline.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every gap between the lowest and highest spilled register so the
    // mask describes one contiguous vpush/vpop range.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
513
// Emits the method prologue: optional stack-overflow probe, callee-save
// pushes (core then FPU), frame allocation, and the spill of the current
// method at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Probe the address below SP at the reserved-bytes boundary; if the stack
    // has overflowed this load faults, and the recorded PC maps the fault to
    // this method.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is contiguous (see ComputeSpillMask), so one vpush
    // starting at its lowest register covers all of it.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Allocate the rest of the frame and store R0 (the current method per the
  // calling convention) at kCurrentMethodStackOffset (SP + 0).
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
540
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // core_spill_mask_ contains PC (see kCoreCalleeSaves): popping it loads the
  // saved return address into PC and returns from the method.
  __ PopList(core_spill_mask_);
}
553
// Binds `block`'s label to the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
557
558Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
559  switch (load->GetType()) {
560    case Primitive::kPrimLong:
561    case Primitive::kPrimDouble:
562      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
563      break;
564
565    case Primitive::kPrimInt:
566    case Primitive::kPrimNot:
567    case Primitive::kPrimFloat:
568      return Location::StackSlot(GetStackSlot(load->GetLocal()));
569
570    case Primitive::kPrimBoolean:
571    case Primitive::kPrimByte:
572    case Primitive::kPrimChar:
573    case Primitive::kPrimShort:
574    case Primitive::kPrimVoid:
575      LOG(FATAL) << "Unexpected type " << load->GetType();
576  }
577
578  LOG(FATAL) << "Unreachable";
579  return Location();
580}
581
// Assigns the location for the next method argument of `type`, maintaining
// the visitor's running register/stack cursors. 32-bit values take one core
// register or stack slot; longs take an even-aligned core register pair
// (never straddling R1); floats/doubles share the FPU registers with
// back-filling of odd S registers freed by double alignment.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      // A long always consumes two argument positions, register or not.
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The convention's register list must provide consecutive registers
        // here so the pair is valid.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When the float cursor is even, no odd S register was left behind by
      // a previous double; catch up with the double cursor.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at the next even S register at or past the float cursor.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
656
// Returns where a callee leaves its result: R0 (or the R0/R1 pair) for
// integral types, S0 (or the S0/S1 pair) for floating-point types, and an
// invalid location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
686
// Moves a 32-bit value between any combination of core register, S register,
// and stack slot. Stack-to-stack moves go through the IP scratch register.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
720
// Moves a 64-bit value between register pairs, FPU register pairs, and
// double stack slots. Overlapping-source cases go through the parallel move
// resolver; register↔FPU pair moves are not implemented.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // Pair-to-pair: the halves may overlap, so let the parallel move
      // resolver order the two word moves safely.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // ldrd requires an even-aligned, consecutive destination pair.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not a strd-compatible pair; store the two words separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Slot-to-slot: resolve the two word copies through the parallel move
      // resolver in case the slots overlap.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
774
// Moves the value produced by `instruction` into `location` for use by
// `move_for`. Handles three producers: constants (materialized as
// immediates), locals (read from their stack slot), and temporaries /
// just-computed outputs (copied from their output location).
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Already in place — nothing to emit.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
      int32_t value = GetInt32ValueOf(const_to_move);
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Materialize in IP, then spill.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Spill the long one 32-bit half at a time through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // Otherwise the producer must immediately precede the consumer (possibly
    // through a temporary), so its output location is still live.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
858
// Calls a quick runtime entrypoint: loads the entrypoint address at
// `entry_point_offset` from the current Thread (register TR) and
// branch-and-links to it, then records stack map info at `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc, slow_path);
  // Only the listed instruction kinds (or instructions whose locations
  // declare CanCall) may invoke the runtime from a leaf method.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
873
// An unconditional branch needs no operand locations.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
877
// Emits the branch for an HGoto, folding in suspend checks where required.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  // On a loop back edge, emit the loop's suspend check (which also branches
  // back to the header), instead of a plain branch.
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The entry block's trailing suspend check is emitted here, just before
  // falling through into the method body.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Elide the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
899
// The exit block's terminator needs no operand locations.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
903
// No code is generated for HExit; frame teardown happens at returns.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
}
907
908void LocationsBuilderARM::VisitIf(HIf* if_instr) {
909  LocationSummary* locations =
910      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
911  HInstruction* cond = if_instr->InputAt(0);
912  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
913    locations->SetInAt(0, Location::RequiresRegister());
914  }
915}
916
// Emits the conditional branch for an HIf. Handles three cases: a constant
// condition (branch statically decided), a materialized condition (compare
// the boolean against 0), and a non-materialized condition (emit the
// comparison inline and branch on its flags).
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        HConstant* constant = locations->InAt(1).GetConstant();
        int32_t value = CodeGenerator::GetInt32ValueOf(constant);
        ShifterOperand operand;
        // Use the constant as an immediate operand when it is encodable;
        // otherwise stage it in the scratch register IP.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is laid out next.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
968
969
970void LocationsBuilderARM::VisitCondition(HCondition* comp) {
971  LocationSummary* locations =
972      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
973  locations->SetInAt(0, Location::RequiresRegister());
974  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
975  if (comp->NeedsMaterialization()) {
976    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
977  }
978}
979
// Materializes a comparison into a 0/1 value using a compare followed by a
// Thumb-2 IT (if-then-else) block. No-op when the condition is consumed
// directly by a branch.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = CodeGenerator::GetInt32ValueOf(locations->InAt(1).GetConstant());
    ShifterOperand operand;
    // Encodable constants become immediate operands; others go through IP.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT block: move 1 into the output if the condition holds, else move 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1005
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1009
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1013
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1017
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1021
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1025
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1029
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1033
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1037
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1041
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1045
// Delegates to the shared HCondition location handling.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1049
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1053
// Locals need no operand locations; they are pure stack-slot bookkeeping.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1057
// No code; only checks that locals are declared in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1061
// Loading a local needs no locations; the move is emitted at the use site.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1065
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1070
1071void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1072  LocationSummary* locations =
1073      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1074  switch (store->InputAt(1)->GetType()) {
1075    case Primitive::kPrimBoolean:
1076    case Primitive::kPrimByte:
1077    case Primitive::kPrimChar:
1078    case Primitive::kPrimShort:
1079    case Primitive::kPrimInt:
1080    case Primitive::kPrimNot:
1081    case Primitive::kPrimFloat:
1082      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1083      break;
1084
1085    case Primitive::kPrimLong:
1086    case Primitive::kPrimDouble:
1087      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1088      break;
1089
1090    default:
1091      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1092  }
1093}
1094
// No code; the store is realized by the input's stack-slot location.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1098
1099void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1100  LocationSummary* locations =
1101      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1102  locations->SetOut(Location::ConstantLocation(constant));
1103}
1104
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1109
1110void LocationsBuilderARM::VisitNullConstant(HNullConstant* constant) {
1111  LocationSummary* locations =
1112      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1113  locations->SetOut(Location::ConstantLocation(constant));
1114}
1115
void InstructionCodeGeneratorARM::VisitNullConstant(HNullConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1120
1121void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1122  LocationSummary* locations =
1123      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1124  locations->SetOut(Location::ConstantLocation(constant));
1125}
1126
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1131
1132void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1133  LocationSummary* locations =
1134      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1135  locations->SetOut(Location::ConstantLocation(constant));
1136}
1137
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1142
1143void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1144  LocationSummary* locations =
1145      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1146  locations->SetOut(Location::ConstantLocation(constant));
1147}
1148
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1153
// A void return has no operands.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1157
// Emits the frame teardown (and implicit return) for a void return.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1162
1163void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1164  LocationSummary* locations =
1165      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1166  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1167}
1168
// Emits the frame teardown; the value is already in the return location.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1173
// Allocates locations for a static/direct call. Tries to recognize the
// callee as an intrinsic first; otherwise falls back to the generic
// call handling.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1183
// Loads the current ArtMethod* into `reg`; it is spilled at SP + 0
// (kCurrentMethodStackOffset) on frame entry.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1188
1189static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1190  if (invoke->GetLocations()->Intrinsified()) {
1191    IntrinsicCodeGeneratorARM intrinsic(codegen);
1192    intrinsic.Dispatch(invoke);
1193    return true;
1194  }
1195  return false;
1196}
1197
// Emits a static/direct call, preferring an intrinsic expansion when one
// was recognized. Temp 0 holds the resolved method during the call sequence.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1208
1209void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1210  LocationSummary* locations =
1211      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1212  locations->AddTemp(Location::RegisterLocation(R0));
1213
1214  InvokeDexCallingConventionVisitor calling_convention_visitor;
1215  for (size_t i = 0; i < invoke->InputCount(); i++) {
1216    HInstruction* input = invoke->InputAt(i);
1217    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1218  }
1219
1220  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1221}
1222
// Allocates locations for a virtual call; intrinsics take priority over the
// generic call handling, mirroring VisitInvokeStaticOrDirect.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1232
// Emits a virtual call: loads the receiver's class, indexes the class's
// embedded vtable by the method's vtable index, and calls through the
// method's quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the vtable entry inside the class object.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1263
// Allocates locations for an interface call; on top of the generic handling
// it reserves R12 for the hidden argument (the callee's dex method index).
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1269
// Emits an interface call: passes the dex method index as a hidden argument
// in temp 1 (R12), loads the receiver's class, indexes its embedded IMT, and
// calls through the resolved method's quick entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // IMT slots are shared: the index is reduced modulo the IMT size.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1302
// Allocates locations for an arithmetic negation, by result type.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The long negation writes the low output word before reading the high
      // input word (see VisitNeg below), so output must not alias the input.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1328
// Emits the negation. Int uses a reverse-subtract from zero; long chains the
// borrow through SBC/SUB (RSC is unavailable in Thumb-2); float/double use
// the VFP negate instructions.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1375
1376void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1377  Primitive::Type result_type = conversion->GetResultType();
1378  Primitive::Type input_type = conversion->GetInputType();
1379  DCHECK_NE(result_type, input_type);
1380
1381  // The float-to-long and double-to-long type conversions rely on a
1382  // call to the runtime.
1383  LocationSummary::CallKind call_kind =
1384      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1385       && result_type == Primitive::kPrimLong)
1386      ? LocationSummary::kCall
1387      : LocationSummary::kNoCall;
1388  LocationSummary* locations =
1389      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1390
1391  switch (result_type) {
1392    case Primitive::kPrimByte:
1393      switch (input_type) {
1394        case Primitive::kPrimShort:
1395        case Primitive::kPrimInt:
1396        case Primitive::kPrimChar:
1397          // Processing a Dex `int-to-byte' instruction.
1398          locations->SetInAt(0, Location::RequiresRegister());
1399          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1400          break;
1401
1402        default:
1403          LOG(FATAL) << "Unexpected type conversion from " << input_type
1404                     << " to " << result_type;
1405      }
1406      break;
1407
1408    case Primitive::kPrimShort:
1409      switch (input_type) {
1410        case Primitive::kPrimByte:
1411        case Primitive::kPrimInt:
1412        case Primitive::kPrimChar:
1413          // Processing a Dex `int-to-short' instruction.
1414          locations->SetInAt(0, Location::RequiresRegister());
1415          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1416          break;
1417
1418        default:
1419          LOG(FATAL) << "Unexpected type conversion from " << input_type
1420                     << " to " << result_type;
1421      }
1422      break;
1423
1424    case Primitive::kPrimInt:
1425      switch (input_type) {
1426        case Primitive::kPrimLong:
1427          // Processing a Dex `long-to-int' instruction.
1428          locations->SetInAt(0, Location::Any());
1429          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1430          break;
1431
1432        case Primitive::kPrimFloat:
1433          // Processing a Dex `float-to-int' instruction.
1434          locations->SetInAt(0, Location::RequiresFpuRegister());
1435          locations->SetOut(Location::RequiresRegister());
1436          locations->AddTemp(Location::RequiresFpuRegister());
1437          break;
1438
1439        case Primitive::kPrimDouble:
1440          // Processing a Dex `double-to-int' instruction.
1441          locations->SetInAt(0, Location::RequiresFpuRegister());
1442          locations->SetOut(Location::RequiresRegister());
1443          locations->AddTemp(Location::RequiresFpuRegister());
1444          break;
1445
1446        default:
1447          LOG(FATAL) << "Unexpected type conversion from " << input_type
1448                     << " to " << result_type;
1449      }
1450      break;
1451
1452    case Primitive::kPrimLong:
1453      switch (input_type) {
1454        case Primitive::kPrimByte:
1455        case Primitive::kPrimShort:
1456        case Primitive::kPrimInt:
1457        case Primitive::kPrimChar:
1458          // Processing a Dex `int-to-long' instruction.
1459          locations->SetInAt(0, Location::RequiresRegister());
1460          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1461          break;
1462
1463        case Primitive::kPrimFloat: {
1464          // Processing a Dex `float-to-long' instruction.
1465          InvokeRuntimeCallingConvention calling_convention;
1466          locations->SetInAt(0, Location::FpuRegisterLocation(
1467              calling_convention.GetFpuRegisterAt(0)));
1468          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1469          break;
1470        }
1471
1472        case Primitive::kPrimDouble: {
1473          // Processing a Dex `double-to-long' instruction.
1474          InvokeRuntimeCallingConvention calling_convention;
1475          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1476              calling_convention.GetFpuRegisterAt(0),
1477              calling_convention.GetFpuRegisterAt(1)));
1478          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1479          break;
1480        }
1481
1482        default:
1483          LOG(FATAL) << "Unexpected type conversion from " << input_type
1484                     << " to " << result_type;
1485      }
1486      break;
1487
1488    case Primitive::kPrimChar:
1489      switch (input_type) {
1490        case Primitive::kPrimByte:
1491        case Primitive::kPrimShort:
1492        case Primitive::kPrimInt:
1493          // Processing a Dex `int-to-char' instruction.
1494          locations->SetInAt(0, Location::RequiresRegister());
1495          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1496          break;
1497
1498        default:
1499          LOG(FATAL) << "Unexpected type conversion from " << input_type
1500                     << " to " << result_type;
1501      }
1502      break;
1503
1504    case Primitive::kPrimFloat:
1505      switch (input_type) {
1506        case Primitive::kPrimByte:
1507        case Primitive::kPrimShort:
1508        case Primitive::kPrimInt:
1509        case Primitive::kPrimChar:
1510          // Processing a Dex `int-to-float' instruction.
1511          locations->SetInAt(0, Location::RequiresRegister());
1512          locations->SetOut(Location::RequiresFpuRegister());
1513          break;
1514
1515        case Primitive::kPrimLong:
1516          // Processing a Dex `long-to-float' instruction.
1517          locations->SetInAt(0, Location::RequiresRegister());
1518          locations->SetOut(Location::RequiresFpuRegister());
1519          locations->AddTemp(Location::RequiresRegister());
1520          locations->AddTemp(Location::RequiresRegister());
1521          locations->AddTemp(Location::RequiresFpuRegister());
1522          locations->AddTemp(Location::RequiresFpuRegister());
1523          break;
1524
1525        case Primitive::kPrimDouble:
1526          // Processing a Dex `double-to-float' instruction.
1527          locations->SetInAt(0, Location::RequiresFpuRegister());
1528          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1529          break;
1530
1531        default:
1532          LOG(FATAL) << "Unexpected type conversion from " << input_type
1533                     << " to " << result_type;
1534      };
1535      break;
1536
1537    case Primitive::kPrimDouble:
1538      switch (input_type) {
1539        case Primitive::kPrimByte:
1540        case Primitive::kPrimShort:
1541        case Primitive::kPrimInt:
1542        case Primitive::kPrimChar:
1543          // Processing a Dex `int-to-double' instruction.
1544          locations->SetInAt(0, Location::RequiresRegister());
1545          locations->SetOut(Location::RequiresFpuRegister());
1546          break;
1547
1548        case Primitive::kPrimLong:
1549          // Processing a Dex `long-to-double' instruction.
1550          locations->SetInAt(0, Location::RequiresRegister());
1551          locations->SetOut(Location::RequiresFpuRegister());
1552          locations->AddTemp(Location::RequiresRegister());
1553          locations->AddTemp(Location::RequiresRegister());
1554          locations->AddTemp(Location::RequiresFpuRegister());
1555          break;
1556
1557        case Primitive::kPrimFloat:
1558          // Processing a Dex `float-to-double' instruction.
1559          locations->SetInAt(0, Location::RequiresFpuRegister());
1560          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1561          break;
1562
1563        default:
1564          LOG(FATAL) << "Unexpected type conversion from " << input_type
1565                     << " to " << result_type;
1566      };
1567      break;
1568
1569    default:
1570      LOG(FATAL) << "Unexpected type conversion from " << input_type
1571                 << " to " << result_type;
1572  }
1573}
1574
// Emits code for an HTypeConversion.
//
// The supported conversions mirror what the corresponding locations builder
// set up:
//  - narrowing integral conversions use a single bit-field extract
//    (sbfx for signed byte/short, ubfx for char);
//  - long <-> int moves/extends use core-register moves;
//  - float/double -> int go through an FPU temp and VFP vcvt;
//  - float/double -> long call into the runtime (pF2l / pD2l);
//  - long -> float/double is open-coded with double-precision arithmetic
//    (see the comments inside those cases).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // A conversion to the same type would be a no-op and should have been
  // eliminated before code generation.
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Sign-extract the low 8 bits of the input.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Sign-extract the low 16 bits of the input.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept; the input may live in
          // a register pair, on the stack, or be a constant.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // Convert in an FPU temp, then move the result to the core register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // The conversion result lands in the low S register of the temp
          // D register, then moves to the core output register.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          // The high word is the low word arithmetically shifted by 31,
          // i.e. a copy of the sign bit in every bit position.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // Inputs and outputs are in the runtime calling convention
          // registers, as arranged by the locations builder.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc(),
                                  nullptr);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // char is unsigned 16-bit, hence the unsigned extract.
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          // Move the int into the output S register, then convert in place.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.
          //
          // Overall: output = float(double(high) * 2^32 + unsigned(low)).

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          // Move the int into the low S register of the output pair, then
          // convert it in place as a D register.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same scheme as long-to-float above, but the result stays in
          // double precision: out_d = double(high) * 2^32 + unsigned(low).
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1836
1837void LocationsBuilderARM::VisitAdd(HAdd* add) {
1838  LocationSummary* locations =
1839      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1840  switch (add->GetResultType()) {
1841    case Primitive::kPrimInt: {
1842      locations->SetInAt(0, Location::RequiresRegister());
1843      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1844      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1845      break;
1846    }
1847
1848    case Primitive::kPrimLong: {
1849      locations->SetInAt(0, Location::RequiresRegister());
1850      locations->SetInAt(1, Location::RequiresRegister());
1851      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1852      break;
1853    }
1854
1855    case Primitive::kPrimFloat:
1856    case Primitive::kPrimDouble: {
1857      locations->SetInAt(0, Location::RequiresFpuRegister());
1858      locations->SetInAt(1, Location::RequiresFpuRegister());
1859      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1860      break;
1861    }
1862
1863    default:
1864      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1865  }
1866}
1867
1868void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1869  LocationSummary* locations = add->GetLocations();
1870  Location out = locations->Out();
1871  Location first = locations->InAt(0);
1872  Location second = locations->InAt(1);
1873  switch (add->GetResultType()) {
1874    case Primitive::kPrimInt:
1875      if (second.IsRegister()) {
1876        __ add(out.AsRegister<Register>(),
1877               first.AsRegister<Register>(),
1878               ShifterOperand(second.AsRegister<Register>()));
1879      } else {
1880        __ AddConstant(out.AsRegister<Register>(),
1881                       first.AsRegister<Register>(),
1882                       second.GetConstant()->AsIntConstant()->GetValue());
1883      }
1884      break;
1885
1886    case Primitive::kPrimLong: {
1887      DCHECK(second.IsRegisterPair());
1888      __ adds(out.AsRegisterPairLow<Register>(),
1889              first.AsRegisterPairLow<Register>(),
1890              ShifterOperand(second.AsRegisterPairLow<Register>()));
1891      __ adc(out.AsRegisterPairHigh<Register>(),
1892             first.AsRegisterPairHigh<Register>(),
1893             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1894      break;
1895    }
1896
1897    case Primitive::kPrimFloat:
1898      __ vadds(out.AsFpuRegister<SRegister>(),
1899               first.AsFpuRegister<SRegister>(),
1900               second.AsFpuRegister<SRegister>());
1901      break;
1902
1903    case Primitive::kPrimDouble:
1904      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1905               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1906               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1907      break;
1908
1909    default:
1910      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1911  }
1912}
1913
1914void LocationsBuilderARM::VisitSub(HSub* sub) {
1915  LocationSummary* locations =
1916      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1917  switch (sub->GetResultType()) {
1918    case Primitive::kPrimInt: {
1919      locations->SetInAt(0, Location::RequiresRegister());
1920      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1921      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1922      break;
1923    }
1924
1925    case Primitive::kPrimLong: {
1926      locations->SetInAt(0, Location::RequiresRegister());
1927      locations->SetInAt(1, Location::RequiresRegister());
1928      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1929      break;
1930    }
1931    case Primitive::kPrimFloat:
1932    case Primitive::kPrimDouble: {
1933      locations->SetInAt(0, Location::RequiresFpuRegister());
1934      locations->SetInAt(1, Location::RequiresFpuRegister());
1935      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1936      break;
1937    }
1938    default:
1939      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1940  }
1941}
1942
1943void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1944  LocationSummary* locations = sub->GetLocations();
1945  Location out = locations->Out();
1946  Location first = locations->InAt(0);
1947  Location second = locations->InAt(1);
1948  switch (sub->GetResultType()) {
1949    case Primitive::kPrimInt: {
1950      if (second.IsRegister()) {
1951        __ sub(out.AsRegister<Register>(),
1952               first.AsRegister<Register>(),
1953               ShifterOperand(second.AsRegister<Register>()));
1954      } else {
1955        __ AddConstant(out.AsRegister<Register>(),
1956                       first.AsRegister<Register>(),
1957                       -second.GetConstant()->AsIntConstant()->GetValue());
1958      }
1959      break;
1960    }
1961
1962    case Primitive::kPrimLong: {
1963      DCHECK(second.IsRegisterPair());
1964      __ subs(out.AsRegisterPairLow<Register>(),
1965              first.AsRegisterPairLow<Register>(),
1966              ShifterOperand(second.AsRegisterPairLow<Register>()));
1967      __ sbc(out.AsRegisterPairHigh<Register>(),
1968             first.AsRegisterPairHigh<Register>(),
1969             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1970      break;
1971    }
1972
1973    case Primitive::kPrimFloat: {
1974      __ vsubs(out.AsFpuRegister<SRegister>(),
1975               first.AsFpuRegister<SRegister>(),
1976               second.AsFpuRegister<SRegister>());
1977      break;
1978    }
1979
1980    case Primitive::kPrimDouble: {
1981      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1982               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1983               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1984      break;
1985    }
1986
1987
1988    default:
1989      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1990  }
1991}
1992
1993void LocationsBuilderARM::VisitMul(HMul* mul) {
1994  LocationSummary* locations =
1995      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1996  switch (mul->GetResultType()) {
1997    case Primitive::kPrimInt:
1998    case Primitive::kPrimLong:  {
1999      locations->SetInAt(0, Location::RequiresRegister());
2000      locations->SetInAt(1, Location::RequiresRegister());
2001      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2002      break;
2003    }
2004
2005    case Primitive::kPrimFloat:
2006    case Primitive::kPrimDouble: {
2007      locations->SetInAt(0, Location::RequiresFpuRegister());
2008      locations->SetInAt(1, Location::RequiresFpuRegister());
2009      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2010      break;
2011    }
2012
2013    default:
2014      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2015  }
2016}
2017
// Emits code for an HMul.  Int and FP multiplies map to a single
// instruction; a long multiply is open-coded from 32x32-bit partial
// products using mul/mla/umull (see the formula in the kPrimLong case).
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      // The mla below writes out_hi before the inputs' low words are read
      // by umull, so out_hi must not alias them.
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // IP is reused here to receive the high half of the unsigned product.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2079
2080void LocationsBuilderARM::VisitDiv(HDiv* div) {
2081  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2082      ? LocationSummary::kCall
2083      : LocationSummary::kNoCall;
2084  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2085
2086  switch (div->GetResultType()) {
2087    case Primitive::kPrimInt: {
2088      locations->SetInAt(0, Location::RequiresRegister());
2089      locations->SetInAt(1, Location::RequiresRegister());
2090      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2091      break;
2092    }
2093    case Primitive::kPrimLong: {
2094      InvokeRuntimeCallingConvention calling_convention;
2095      locations->SetInAt(0, Location::RegisterPairLocation(
2096          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2097      locations->SetInAt(1, Location::RegisterPairLocation(
2098          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2099      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2100      break;
2101    }
2102    case Primitive::kPrimFloat:
2103    case Primitive::kPrimDouble: {
2104      locations->SetInAt(0, Location::RequiresFpuRegister());
2105      locations->SetInAt(1, Location::RequiresFpuRegister());
2106      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2107      break;
2108    }
2109
2110    default:
2111      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2112  }
2113}
2114
2115void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2116  LocationSummary* locations = div->GetLocations();
2117  Location out = locations->Out();
2118  Location first = locations->InAt(0);
2119  Location second = locations->InAt(1);
2120
2121  switch (div->GetResultType()) {
2122    case Primitive::kPrimInt: {
2123      __ sdiv(out.AsRegister<Register>(),
2124              first.AsRegister<Register>(),
2125              second.AsRegister<Register>());
2126      break;
2127    }
2128
2129    case Primitive::kPrimLong: {
2130      InvokeRuntimeCallingConvention calling_convention;
2131      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2132      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2133      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2134      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2135      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2136      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2137
2138      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc(), nullptr);
2139      break;
2140    }
2141
2142    case Primitive::kPrimFloat: {
2143      __ vdivs(out.AsFpuRegister<SRegister>(),
2144               first.AsFpuRegister<SRegister>(),
2145               second.AsFpuRegister<SRegister>());
2146      break;
2147    }
2148
2149    case Primitive::kPrimDouble: {
2150      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2151               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2152               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2153      break;
2154    }
2155
2156    default:
2157      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2158  }
2159}
2160
2161void LocationsBuilderARM::VisitRem(HRem* rem) {
2162  Primitive::Type type = rem->GetResultType();
2163  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2164      ? LocationSummary::kNoCall
2165      : LocationSummary::kCall;
2166  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2167
2168  switch (type) {
2169    case Primitive::kPrimInt: {
2170      locations->SetInAt(0, Location::RequiresRegister());
2171      locations->SetInAt(1, Location::RequiresRegister());
2172      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2173      locations->AddTemp(Location::RequiresRegister());
2174      break;
2175    }
2176    case Primitive::kPrimLong: {
2177      InvokeRuntimeCallingConvention calling_convention;
2178      locations->SetInAt(0, Location::RegisterPairLocation(
2179          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2180      locations->SetInAt(1, Location::RegisterPairLocation(
2181          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2182      // The runtime helper puts the output in R2,R3.
2183      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2184      break;
2185    }
2186    case Primitive::kPrimFloat: {
2187      InvokeRuntimeCallingConvention calling_convention;
2188      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2189      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2190      locations->SetOut(Location::FpuRegisterLocation(S0));
2191      break;
2192    }
2193
2194    case Primitive::kPrimDouble: {
2195      InvokeRuntimeCallingConvention calling_convention;
2196      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2197          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2198      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2199          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2200      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2201      break;
2202    }
2203
2204    default:
2205      LOG(FATAL) << "Unexpected rem type " << type;
2206  }
2207}
2208
2209void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2210  LocationSummary* locations = rem->GetLocations();
2211  Location out = locations->Out();
2212  Location first = locations->InAt(0);
2213  Location second = locations->InAt(1);
2214
2215  Primitive::Type type = rem->GetResultType();
2216  switch (type) {
2217    case Primitive::kPrimInt: {
2218      Register reg1 = first.AsRegister<Register>();
2219      Register reg2 = second.AsRegister<Register>();
2220      Register temp = locations->GetTemp(0).AsRegister<Register>();
2221
2222      // temp = reg1 / reg2  (integer division)
2223      // temp = temp * reg2
2224      // dest = reg1 - temp
2225      __ sdiv(temp, reg1, reg2);
2226      __ mul(temp, temp, reg2);
2227      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2228      break;
2229    }
2230
2231    case Primitive::kPrimLong: {
2232      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc(), nullptr);
2233      break;
2234    }
2235
2236    case Primitive::kPrimFloat: {
2237      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc(), nullptr);
2238      break;
2239    }
2240
2241    case Primitive::kPrimDouble: {
2242      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc(), nullptr);
2243      break;
2244    }
2245
2246    default:
2247      LOG(FATAL) << "Unexpected rem type " << type;
2248  }
2249}
2250
2251void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2252  LocationSummary* locations =
2253      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2254  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2255  if (instruction->HasUses()) {
2256    locations->SetOut(Location::SameAsFirstInput());
2257  }
2258}
2259
2260void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2261  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2262  codegen_->AddSlowPath(slow_path);
2263
2264  LocationSummary* locations = instruction->GetLocations();
2265  Location value = locations->InAt(0);
2266
2267  switch (instruction->GetType()) {
2268    case Primitive::kPrimInt: {
2269      if (value.IsRegister()) {
2270        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2271        __ b(slow_path->GetEntryLabel(), EQ);
2272      } else {
2273        DCHECK(value.IsConstant()) << value;
2274        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2275          __ b(slow_path->GetEntryLabel());
2276        }
2277      }
2278      break;
2279    }
2280    case Primitive::kPrimLong: {
2281      if (value.IsRegisterPair()) {
2282        __ orrs(IP,
2283                value.AsRegisterPairLow<Register>(),
2284                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2285        __ b(slow_path->GetEntryLabel(), EQ);
2286      } else {
2287        DCHECK(value.IsConstant()) << value;
2288        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2289          __ b(slow_path->GetEntryLabel());
2290        }
2291      }
2292      break;
2293    default:
2294      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2295    }
2296  }
2297}
2298
2299void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2300  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2301
2302  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2303      ? LocationSummary::kCall
2304      : LocationSummary::kNoCall;
2305  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2306
2307  switch (op->GetResultType()) {
2308    case Primitive::kPrimInt: {
2309      locations->SetInAt(0, Location::RequiresRegister());
2310      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2311      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2312      break;
2313    }
2314    case Primitive::kPrimLong: {
2315      InvokeRuntimeCallingConvention calling_convention;
2316      locations->SetInAt(0, Location::RegisterPairLocation(
2317          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2318      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2319      // The runtime helper puts the output in R0,R1.
2320      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2321      break;
2322    }
2323    default:
2324      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2325  }
2326}
2327
// Shared code generation for HShl/HShr/HUShr.  Int shifts are emitted
// inline (with the count masked to the low 5 bits, matching Java shift
// semantics); long shifts call a runtime helper via a raw blx.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): this masks the count in place, clobbering
        // second_reg (an input register) — verify no later user of this
        // value relies on it surviving the shift, or mask into a temp.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift amount: masked at compile time.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // Verify the register-allocation contract set up by HandleShift in
      // the locations builder: operands in the runtime calling convention
      // registers, result in R0,R1.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // Direct call through the thread-local entrypoint table, bypassing
      // InvokeRuntime.  NOTE(review): no stack map is recorded for this
      // call site — presumably these shift helpers cannot throw or
      // suspend; confirm before relying on that.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2392
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // Shl shares location constraints with the other shift operations.
  HandleShift(shl);
}
2396
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation is shared with the other shift operations.
  HandleShift(shl);
}
2400
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // Shr shares location constraints with the other shift operations.
  HandleShift(shr);
}
2404
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation is shared with the other shift operations.
  HandleShift(shr);
}
2408
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // UShr shares location constraints with the other shift operations.
  HandleShift(ushr);
}
2412
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation is shared with the other shift operations.
  HandleShift(ushr);
}
2416
2417void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2418  LocationSummary* locations =
2419      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2420  InvokeRuntimeCallingConvention calling_convention;
2421  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2422  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2423  locations->SetOut(Location::RegisterLocation(R0));
2424}
2425
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  // Calls the allocation entrypoint: type index in the first argument
  // register, the current method in the second; result comes back in R0.
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2435
2436void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2437  LocationSummary* locations =
2438      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2439  InvokeRuntimeCallingConvention calling_convention;
2440  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2441  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2442  locations->SetOut(Location::RegisterLocation(R0));
2443  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2444}
2445
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  // Calls the array allocation entrypoint: type index in the first argument
  // register, current method in the third; the length was fixed to the
  // second argument register by the locations builder.
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
}
2455
2456void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2457  LocationSummary* locations =
2458      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2459  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2460  if (location.IsStackSlot()) {
2461    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2462  } else if (location.IsDoubleStackSlot()) {
2463    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2464  }
2465  locations->SetOut(location);
2466}
2467
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2472
2473void LocationsBuilderARM::VisitNot(HNot* not_) {
2474  LocationSummary* locations =
2475      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2476  locations->SetInAt(0, Location::RequiresRegister());
2477  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2478}
2479
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  // Bitwise not: a single mvn for ints, one mvn per word for longs.
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2500
2501void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2502  LocationSummary* locations =
2503      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2504  switch (compare->InputAt(0)->GetType()) {
2505    case Primitive::kPrimLong: {
2506      locations->SetInAt(0, Location::RequiresRegister());
2507      locations->SetInAt(1, Location::RequiresRegister());
2508      // Output overlaps because it is written before doing the low comparison.
2509      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2510      break;
2511    }
2512    case Primitive::kPrimFloat:
2513    case Primitive::kPrimDouble: {
2514      locations->SetInAt(0, Location::RequiresFpuRegister());
2515      locations->SetInAt(1, Location::RequiresFpuRegister());
2516      locations->SetOut(Location::RequiresRegister());
2517      break;
2518    }
2519    default:
2520      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2521  }
2522}
2523
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  // Materializes the comparison of the two inputs as -1, 0 or 1 in `out`.
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // High words decide unless equal; then the low words are compared
      // unsigned and the shared flag-handling code below finishes the job.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // Unordered (NaN) result: the compare's gt/lt bias picks the outcome.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Flags from the last cmp/vmstat select equal / less; otherwise fall
  // through to `greater`.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2572
2573void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2574  LocationSummary* locations =
2575      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2576  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2577    locations->SetInAt(i, Location::Any());
2578  }
2579  locations->SetOut(Location::Any());
2580}
2581
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis never reach code generation; they are eliminated before this phase.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2586
2587void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2588  // TODO (ported from quick): revisit Arm barrier kinds
2589  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2590  switch (kind) {
2591    case MemBarrierKind::kAnyStore:
2592    case MemBarrierKind::kLoadAny:
2593    case MemBarrierKind::kAnyAny: {
2594      flavour = DmbOptions::ISH;
2595      break;
2596    }
2597    case MemBarrierKind::kStoreStore: {
2598      flavour = DmbOptions::ISHST;
2599      break;
2600    }
2601    default:
2602      LOG(FATAL) << "Unexpected memory barrier " << kind;
2603  }
2604  __ dmb(flavour);
2605}
2606
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  // Loads a 64-bit value with ldrexd. ldrexd takes no offset, so a non-zero
  // offset is folded into IP first, using out_lo as scratch (it is
  // overwritten by the load anyway).
  if (offset != 0) {
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2618
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  // Stores a 64-bit value with an ldrexd/strexd retry loop. temp1 is used
  // both as address scratch and as the strexd status result; temp2 receives
  // the (discarded) high word of the exclusive load.
  Label fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 on success; retry until the exclusive store succeeds.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2641
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  // Builds locations for instance/static field stores: the object (or class)
  // and the value both need registers; extra temps are added for the write
  // barrier or for wide volatile stores (their order is relied upon by the
  // code generator's HandleFieldSet).
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  // Wide volatile stores need the ldrexd/strexd sequence when the CPU lacks
  // atomic 64-bit ldrd/strd.
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2678
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Emits an instance/static field store, including the memory barriers for
  // volatile fields and the GC card marking for reference stores.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  // Volatile stores are bracketed by barriers: any/store before, any/any after.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Wide volatile stores fall back to ldrexd/strexd when the CPU has no
      // atomic 64-bit strd.
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the double into the core temps reserved by the builder, then
        // store them with the exclusive-store loop.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  // Reference stores mark the GC card for the object being written into.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2776
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  // Builds locations for instance/static field loads. Volatile doubles may
  // need two core temps for the ldrexd sequence on CPUs without atomic ldrd.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Volatile long loads via ldrexd must not share registers with the base.
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2800
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Emits an instance/static field load, using an atomic wide load for
  // volatile 64-bit fields when needed and a load/any barrier afterwards for
  // volatile fields.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Volatile wide loads use ldrexd when the CPU lacks atomic ldrd.
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load into the core temps atomically, then move into the D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2885
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2889
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Shared with static field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2893
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2897
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Shared with static field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2901
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2905
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shared with instance field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2909
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2913
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shared with instance field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2917
2918void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2919  LocationSummary* locations =
2920      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2921  locations->SetInAt(0, Location::RequiresRegister());
2922  if (instruction->HasUses()) {
2923    locations->SetOut(Location::SameAsFirstInput());
2924  }
2925}
2926
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Implicit check: load from offset 0 of the object into IP and record the
  // pc — presumably a null object faults here and the runtime maps the fault
  // back to this pc.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    // The user instruction's own memory access will perform the check.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2936
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Explicit check: compare the reference against null and branch to the
  // NullCheck slow path when it is zero.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2947
2948void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2949  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2950    GenerateImplicitNullCheck(instruction);
2951  } else {
2952    GenerateExplicitNullCheck(instruction);
2953  }
2954}
2955
2956void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2957  LocationSummary* locations =
2958      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2959  locations->SetInAt(0, Location::RequiresRegister());
2960  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2961  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2962}
2963
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  // Emits an array element load. Constant indices are folded into the load
  // offset; register indices are scaled into IP first. The pc is recorded at
  // the end so the first load can double as an implicit null check.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded as plain words: heap references are 32-bit.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3090
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  // Builds locations for array element stores. Stores needing a type check
  // call the runtime (inputs pinned to argument registers); otherwise the
  // store is inlined, with two temps when a write barrier is required.
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
3117
// Emits the store of a value into an array element. Scalar stores compute
// the element address inline (using IP as a scratch register when the index
// is not constant); reference stores that need a type check are delegated to
// the pAputObject runtime entrypoint.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    // 8-bit components: byte store at data_offset + index.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    // 16-bit components: halfword store at data_offset + (index << 1).
    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        // Inline word store; references additionally mark the GC card.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the null check on the store itself (see the note at the end
        // of the method: this case records it here, not after the switch).
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // A type check is required: delegate the whole store to the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc(),
                                nullptr);
      }
      break;
    }

    // 64-bit components: word-pair store at data_offset + (index << 3).
    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3242
3243void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3244  LocationSummary* locations =
3245      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3246  locations->SetInAt(0, Location::RequiresRegister());
3247  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3248}
3249
3250void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3251  LocationSummary* locations = instruction->GetLocations();
3252  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3253  Register obj = locations->InAt(0).AsRegister<Register>();
3254  Register out = locations->Out().AsRegister<Register>();
3255  __ LoadFromOffset(kLoadWord, out, obj, offset);
3256  codegen_->MaybeRecordImplicitNullCheck(instruction);
3257}
3258
3259void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3260  LocationSummary* locations =
3261      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3262  locations->SetInAt(0, Location::RequiresRegister());
3263  locations->SetInAt(1, Location::RequiresRegister());
3264  if (instruction->HasUses()) {
3265    locations->SetOut(Location::SameAsFirstInput());
3266  }
3267}
3268
3269void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3270  LocationSummary* locations = instruction->GetLocations();
3271  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3272      instruction, locations->InAt(0), locations->InAt(1));
3273  codegen_->AddSlowPath(slow_path);
3274
3275  Register index = locations->InAt(0).AsRegister<Register>();
3276  Register length = locations->InAt(1).AsRegister<Register>();
3277
3278  __ cmp(index, ShifterOperand(length));
3279  __ b(slow_path->GetEntryLabel(), CS);
3280}
3281
// Card-marking write barrier: dirties the card covering `object` after a
// reference `value` has been stored into it. `temp` and `card` are scratch
// registers and are clobbered.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  // Storing null cannot create a reference the GC needs to trace, so the
  // card is left untouched in that case.
  __ CompareAndBranchIfZero(value, &is_null);
  // card <- base address of this thread's card table.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp <- card index of `object` (address >> kCardShift).
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store the low byte of `card` into entry [card + temp]; any non-zero byte
  // marks the card dirty (using the base register itself avoids needing an
  // extra immediate load).
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3290
3291void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3292  temp->SetLocations(nullptr);
3293}
3294
3295void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3296  // Nothing to do, this is driven by the code generator.
3297  UNUSED(temp);
3298}
3299
3300void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3301  UNUSED(instruction);
3302  LOG(FATAL) << "Unreachable";
3303}
3304
3305void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
3306  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3307}
3308
3309void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3310  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3311}
3312
3313void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3314  HBasicBlock* block = instruction->GetBlock();
3315  if (block->GetLoopInformation() != nullptr) {
3316    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3317    // The back edge will generate the suspend check.
3318    return;
3319  }
3320  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3321    // The goto will generate the suspend check.
3322    return;
3323  }
3324  GenerateSuspendCheck(instruction, nullptr);
3325}
3326
// Emits a suspend-request poll. If `successor` is null the code falls through
// after an inline test (slow path returns here); otherwise the check sits on
// a back edge and branches to `successor` when no suspension is requested.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // IP <- the current thread's flags halfword; non-zero means a suspension
  // (or similar) request is pending.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3345
3346ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3347  return codegen_->GetAssembler();
3348}
3349
// Emits a single move from the parallel-move worklist. Dispatches on the
// (source kind, destination kind) pair; IP and DTMP serve as core/FP scratch
// registers for memory-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register -> core register or 32-bit stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot -> core register, S register, or another slot (via IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // S register -> S register or 32-bit stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot -> slot (via DTMP), core pair, or D register.
    if (destination.IsDoubleStackSlot()) {
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      // Word-pair load requires an even/odd adjacent register pair.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair -> core pair or 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // D register (as an S-pair) -> D register or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant -> register(s) or stack, materialized by type.
    DCHECK(source.IsConstant()) << source;
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      // 64-bit constants are split into low/high 32-bit halves.
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill via the raw bit pattern, two words at a time.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3464
// Swaps the contents of core register `reg` and the stack slot at SP + `mem`,
// using IP as the scratch register.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3470
// Swaps two 32-bit stack slots. Needs two scratch core registers: IP plus one
// obtained from the scratch-register scope (which may spill a register to the
// stack, shifting both slot offsets by one word).
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // If the scope pushed a register, SP moved down one word; compensate.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3481
// Emits a swap between the source and destination of one pending move.
// IP is the core scratch register and DTMP the FP scratch register; 64-bit
// core-pair swaps route through DTMP to avoid needing two core scratches.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Classic three-move swap through IP; IP itself must not be an operand.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // S-register swap staged through IP (core register).
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core-pair swap staged through DTMP (one D register holds both words).
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core pair <-> 64-bit stack slot.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    // Save the pair in DTMP, load the slot into the pair, store DTMP back.
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // D-register swap staged through DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // D register <-> 64-bit stack slot, staged through DTMP.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // S register <-> 32-bit stack slot, staged through IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot swap as two independent 32-bit slot swaps.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3554
// Saves a core register on the stack so it can be used as a scratch register.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3558
// Restores a core register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3562
3563void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3564  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3565      ? LocationSummary::kCallOnSlowPath
3566      : LocationSummary::kNoCall;
3567  LocationSummary* locations =
3568      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3569  locations->SetOut(Location::RequiresRegister());
3570}
3571
// Loads a java.lang.Class reference into the output register. The referrer's
// own class is read directly from the current method; other classes go
// through the dex cache, with a slow path for resolution and (optionally)
// class initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // out <- current method -> declaring class; never null, no slow path.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out <- current method -> dex cache resolved types -> [type_index].
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3598
3599void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3600  LocationSummary* locations =
3601      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3602  locations->SetInAt(0, Location::RequiresRegister());
3603  if (check->HasUses()) {
3604    locations->SetOut(Location::SameAsFirstInput());
3605  }
3606}
3607
3608void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3609  // We assume the class is not null.
3610  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3611      check->GetLoadClass(), check, check->GetDexPc(), true);
3612  codegen_->AddSlowPath(slow_path);
3613  GenerateClassInitializationCheck(slow_path,
3614                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3615}
3616
// Branches to `slow_path` unless the class in `class_reg` has reached the
// initialized status. Clobbers IP. Binds the slow path's exit label.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  // IP <- class status word; statuses below kStatusInitialized need the
  // runtime to run (or wait for) the class initializer.
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3627
3628void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3629  LocationSummary* locations =
3630      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3631  locations->SetOut(Location::RequiresRegister());
3632}
3633
// Loads a java.lang.String reference from the declaring class's dex cache
// into the output register, falling back to a slow path when the string has
// not been resolved yet (null cache entry).
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out <- current method -> declaring class -> dex cache strings -> [index].
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // Null means unresolved: resolve through the slow path.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3647
3648void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3649  LocationSummary* locations =
3650      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3651  locations->SetOut(Location::RequiresRegister());
3652}
3653
3654void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3655  Register out = load->GetLocations()->Out().AsRegister<Register>();
3656  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3657  __ LoadFromOffset(kLoadWord, out, TR, offset);
3658  __ LoadImmediate(IP, 0);
3659  __ StoreToOffset(kStoreWord, IP, TR, offset);
3660}
3661
3662void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3663  LocationSummary* locations =
3664      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3665  InvokeRuntimeCallingConvention calling_convention;
3666  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3667}
3668
3669void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3670  codegen_->InvokeRuntime(
3671      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
3672}
3673
3674void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3675  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3676      ? LocationSummary::kNoCall
3677      : LocationSummary::kCallOnSlowPath;
3678  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3679  locations->SetInAt(0, Location::RequiresRegister());
3680  locations->SetInAt(1, Location::RequiresRegister());
3681  // The out register is used as a temporary, so it overlaps with the inputs.
3682  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3683}
3684
// Emits `obj instanceof cls`, producing 0 or 1 in the output register.
// Null objects yield 0. Final classes are decided by a single class-equality
// compare; for other classes, inequality falls through to a slow path that
// performs the full subtype test.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Failure path: materialize 0.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3723
3724void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3725  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3726      instruction, LocationSummary::kCallOnSlowPath);
3727  locations->SetInAt(0, Location::RequiresRegister());
3728  locations->SetInAt(1, Location::RequiresRegister());
3729  locations->AddTemp(Location::RequiresRegister());
3730}
3731
// Emits a checked cast of `obj` to `cls`. Null objects pass trivially; a
// class mismatch falls into the slow path, which performs the full subtype
// test and throws on failure.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // Null passes the cast: jump straight to the exit label.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`; mismatch goes to the slow path.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3752
3753void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3754  LocationSummary* locations =
3755      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3756  InvokeRuntimeCallingConvention calling_convention;
3757  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3758}
3759
3760void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3761  codegen_->InvokeRuntime(instruction->IsEnter()
3762        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3763      instruction,
3764      instruction->GetDexPc(),
3765      nullptr);
3766}
3767
3768void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3769void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3770void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3771
3772void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3773  LocationSummary* locations =
3774      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3775  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3776         || instruction->GetResultType() == Primitive::kPrimLong);
3777  locations->SetInAt(0, Location::RequiresRegister());
3778  locations->SetInAt(1, Location::RequiresRegister());
3779  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3780}
3781
// Delegates to the shared helper, which emits the 32-bit or 64-bit form.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
3785
void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  // Code generation is shared with And/Xor; see HandleBitwiseOperation.
  HandleBitwiseOperation(instruction);
}
3789
void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  // Code generation is shared with And/Or; see HandleBitwiseOperation.
  HandleBitwiseOperation(instruction);
}
3793
3794void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3795  LocationSummary* locations = instruction->GetLocations();
3796
3797  if (instruction->GetResultType() == Primitive::kPrimInt) {
3798    Register first = locations->InAt(0).AsRegister<Register>();
3799    Register second = locations->InAt(1).AsRegister<Register>();
3800    Register out = locations->Out().AsRegister<Register>();
3801    if (instruction->IsAnd()) {
3802      __ and_(out, first, ShifterOperand(second));
3803    } else if (instruction->IsOr()) {
3804      __ orr(out, first, ShifterOperand(second));
3805    } else {
3806      DCHECK(instruction->IsXor());
3807      __ eor(out, first, ShifterOperand(second));
3808    }
3809  } else {
3810    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3811    Location first = locations->InAt(0);
3812    Location second = locations->InAt(1);
3813    Location out = locations->Out();
3814    if (instruction->IsAnd()) {
3815      __ and_(out.AsRegisterPairLow<Register>(),
3816              first.AsRegisterPairLow<Register>(),
3817              ShifterOperand(second.AsRegisterPairLow<Register>()));
3818      __ and_(out.AsRegisterPairHigh<Register>(),
3819              first.AsRegisterPairHigh<Register>(),
3820              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3821    } else if (instruction->IsOr()) {
3822      __ orr(out.AsRegisterPairLow<Register>(),
3823             first.AsRegisterPairLow<Register>(),
3824             ShifterOperand(second.AsRegisterPairLow<Register>()));
3825      __ orr(out.AsRegisterPairHigh<Register>(),
3826             first.AsRegisterPairHigh<Register>(),
3827             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3828    } else {
3829      DCHECK(instruction->IsXor());
3830      __ eor(out.AsRegisterPairLow<Register>(),
3831             first.AsRegisterPairLow<Register>(),
3832             ShifterOperand(second.AsRegisterPairLow<Register>()));
3833      __ eor(out.AsRegisterPairHigh<Register>(),
3834             first.AsRegisterPairHigh<Register>(),
3835             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3836    }
3837  }
3838}
3839
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  // Emits the call sequence for a static or direct invoke. `temp` is used as
  // a scratch register to hold the resolved ArtMethod*; it must be the
  // register in which the callee expects its method pointer.
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // Resolve the callee through the caller's dex cache, then call its
    // quick-compiled entry point via LR.
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // Recursive call: branch directly to this method's own frame entry,
    // skipping the dex cache lookup.
    __ bl(GetFrameEntryLabel());
  }

  // Any method that emits a call is, by definition, not a leaf method.
  DCHECK(!IsLeafMethod());
}
3871
void LocationsBuilderARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor therefore indicates a pass-ordering bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3877
void InstructionCodeGeneratorARM::VisitBoundType(HBoundType* instruction) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor therefore indicates a pass-ordering bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3883
3884}  // namespace arm
3885}  // namespace art
3886