code_generator_arm.cc revision c0572a451944f78397619dec34a38c36c11e9d2a
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static bool ExpectedPairLayout(Location location) {
41  // We expected this for both core and fpu register pairs.
42  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
43}
44
// Stack offset (from SP after frame setup) at which the current method is stored.
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-saved registers. PC is listed to mimic Quick's frame layout: LR is
// pushed in its place at entry, and popping PC at exit performs the return.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };
60
// Calling convention for calls into the runtime: core arguments in R0-R3,
// FPU arguments in S0-S3 (see the kRuntimeParameter* arrays above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
72
73#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
74#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
75
76class SlowPathCodeARM : public SlowPathCode {
77 public:
78  SlowPathCodeARM() : entry_label_(), exit_label_() {}
79
80  Label* GetEntryLabel() { return &entry_label_; }
81  Label* GetExitLabel() { return &exit_label_; }
82
83 private:
84  Label entry_label_;
85  Label exit_label_;
86
87  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
88};
89
90class NullCheckSlowPathARM : public SlowPathCodeARM {
91 public:
92  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
93
94  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
95    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
96    __ Bind(GetEntryLabel());
97    arm_codegen->InvokeRuntime(
98        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
99  }
100
101 private:
102  HNullCheck* const instruction_;
103  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
104};
105
106class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
107 public:
108  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
109
110  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
111    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
112    __ Bind(GetEntryLabel());
113    arm_codegen->InvokeRuntime(
114        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
115  }
116
117 private:
118  HDivZeroCheck* const instruction_;
119  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
120};
121
122class SuspendCheckSlowPathARM : public SlowPathCodeARM {
123 public:
124  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
125      : instruction_(instruction), successor_(successor) {}
126
127  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
128    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
129    __ Bind(GetEntryLabel());
130    codegen->SaveLiveRegisters(instruction_->GetLocations());
131    arm_codegen->InvokeRuntime(
132        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
133    codegen->RestoreLiveRegisters(instruction_->GetLocations());
134    if (successor_ == nullptr) {
135      __ b(GetReturnLabel());
136    } else {
137      __ b(arm_codegen->GetLabelOf(successor_));
138    }
139  }
140
141  Label* GetReturnLabel() {
142    DCHECK(successor_ == nullptr);
143    return &return_label_;
144  }
145
146 private:
147  HSuspendCheck* const instruction_;
148  // If not null, the block to branch to after the suspend check.
149  HBasicBlock* const successor_;
150
151  // If `successor_` is null, the label to branch to after the suspend check.
152  Label return_label_;
153
154  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
155};
156
// Slow path that throws an array-bounds exception, passing the failing index
// and the array length to the runtime in the first two argument registers.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // pThrowArrayBounds does not return, so no exit branch follows.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
188
// Slow path that resolves (pInitializeType) or resolves-and-initializes
// (pInitializeStaticStorage) a class, then moves the result from R0 into
// the instruction's output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: type index in the first register, current method
    // in the second.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
240
// Slow path that resolves a string via pResolveString and moves the result
// (returned in R0) into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be clobbered by the register save/restore.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: string index in the first register, current method
    // in the second.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
269
270class TypeCheckSlowPathARM : public SlowPathCodeARM {
271 public:
272  TypeCheckSlowPathARM(HInstruction* instruction,
273                       Location class_to_check,
274                       Location object_class,
275                       uint32_t dex_pc)
276      : instruction_(instruction),
277        class_to_check_(class_to_check),
278        object_class_(object_class),
279        dex_pc_(dex_pc) {}
280
281  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
282    LocationSummary* locations = instruction_->GetLocations();
283    DCHECK(instruction_->IsCheckCast()
284           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
285
286    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
287    __ Bind(GetEntryLabel());
288    codegen->SaveLiveRegisters(locations);
289
290    // We're moving two locations to locations that could overlap, so we need a parallel
291    // move resolver.
292    InvokeRuntimeCallingConvention calling_convention;
293    codegen->EmitParallelMoves(
294        class_to_check_,
295        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
296        object_class_,
297        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
298
299    if (instruction_->IsInstanceOf()) {
300      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
301      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
302    } else {
303      DCHECK(instruction_->IsCheckCast());
304      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
305    }
306
307    codegen->RestoreLiveRegisters(locations);
308    __ b(GetExitLabel());
309  }
310
311 private:
312  HInstruction* const instruction_;
313  const Location class_to_check_;
314  const Location object_class_;
315  uint32_t dex_pc_;
316
317  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
318};
319
320#undef __
321
322#undef __
323#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
324
325inline Condition ARMCondition(IfCondition cond) {
326  switch (cond) {
327    case kCondEQ: return EQ;
328    case kCondNE: return NE;
329    case kCondLT: return LT;
330    case kCondLE: return LE;
331    case kCondGT: return GT;
332    case kCondGE: return GE;
333    default:
334      LOG(FATAL) << "Unknown if condition";
335  }
336  return EQ;        // Unreachable.
337}
338
339inline Condition ARMOppositeCondition(IfCondition cond) {
340  switch (cond) {
341    case kCondEQ: return NE;
342    case kCondNE: return EQ;
343    case kCondLT: return GE;
344    case kCondLE: return GT;
345    case kCondGT: return LE;
346    case kCondGE: return LT;
347    default:
348      LOG(FATAL) << "Unknown if condition";
349  }
350  return EQ;        // Unreachable.
351}
352
353void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
354  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
355}
356
357void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
358  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
359}
360
361size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
362  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
363  return kArmWordSize;
364}
365
366size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
367  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
368  return kArmWordSize;
369}
370
371size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
372  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
373  return kArmWordSize;
374}
375
376size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
377  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
378  return kArmWordSize;
379}
380
// Constructs the ARM code generator, declaring the register file sizes and
// the callee-save masks (derived from kCoreCalleeSaves / kFpuCalleeSaves)
// to the base CodeGenerator.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
402
// Picks a free register (or register pair) suitable for `type` and marks it
// as blocked, keeping the pair bookkeeping consistent with the single-register
// bookkeeping.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs need a core register pair; block both halves individually too.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need two consecutive S registers starting at an even index.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
453
454void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
455  // Don't allocate the dalvik style register pair passing.
456  blocked_register_pairs_[R1_R2] = true;
457
458  // Stack register, LR and PC are always reserved.
459  blocked_core_registers_[SP] = true;
460  blocked_core_registers_[LR] = true;
461  blocked_core_registers_[PC] = true;
462
463  // Reserve thread register.
464  blocked_core_registers_[TR] = true;
465
466  // Reserve temp register.
467  blocked_core_registers_[IP] = true;
468
469  if (is_baseline) {
470    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
471      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
472    }
473
474    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
475
476    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
477      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
478    }
479  }
480
481  UpdateBlockedPairRegisters();
482}
483
484void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
485  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
486    ArmManagedRegister current =
487        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
488    if (blocked_core_registers_[current.AsRegisterPairLow()]
489        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
490      blocked_register_pairs_[i] = true;
491    }
492  }
493}
494
// Instruction visitor that emits the native code; it shares the assembler
// owned by `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
499
500static uint32_t LeastSignificantBit(uint32_t mask) {
501  // ffs starts at 1.
502  return ffs(mask) - 1;
503}
504
// Computes the core and FPU spill masks as the intersection of the allocated
// registers with the callee-save masks, then widens them as required by the
// instructions used to save/restore.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
525
// Emits the method prologue: optional stack-overflow probe, callee-save
// pushes (core then FPU), frame allocation, and storing R0 (the incoming
// method pointer, cf. kCurrentMethodStackOffset) at [SP, #0].
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  // Nothing to spill or allocate for methods with an empty frame.
  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Probe the lowest address the method may touch; record a PC so a fault
    // here can be attributed to this method.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // vpushs stores a contiguous run of S registers starting at the lowest
    // set bit of the (contiguous, see ComputeSpillMask) FPU spill mask.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Allocate the rest of the frame and store the current method at [SP, #0].
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
552
// Emits the method epilogue: deallocates the frame and restores callee-saves.
// core_spill_mask_ contains PC (see kCoreCalleeSaves), so the final PopList
// also performs the return.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    // Nothing was pushed at entry; just return.
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
565
// Binds the native label of `block` at the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
569
570Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
571  switch (load->GetType()) {
572    case Primitive::kPrimLong:
573    case Primitive::kPrimDouble:
574      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
575      break;
576
577    case Primitive::kPrimInt:
578    case Primitive::kPrimNot:
579    case Primitive::kPrimFloat:
580      return Location::StackSlot(GetStackSlot(load->GetLocal()));
581
582    case Primitive::kPrimBoolean:
583    case Primitive::kPrimByte:
584    case Primitive::kPrimChar:
585    case Primitive::kPrimShort:
586    case Primitive::kPrimVoid:
587      LOG(FATAL) << "Unexpected type " << load->GetType();
588  }
589
590  LOG(FATAL) << "Unreachable";
591  return Location();
592}
593
// Computes the location (registers or stack slots) of the next dex-level
// argument of the given type, advancing the visitor's register and stack
// cursors accordingly.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs consume two consecutive core registers forming a valid pair
      // (see ExpectedPairLayout), or a double stack slot.
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When the float cursor is pair-aligned, jump past any S registers
      // already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles take an even-aligned S register pair, never before a float
      // still pending in an odd slot.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
668
669Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
670  switch (type) {
671    case Primitive::kPrimBoolean:
672    case Primitive::kPrimByte:
673    case Primitive::kPrimChar:
674    case Primitive::kPrimShort:
675    case Primitive::kPrimInt:
676    case Primitive::kPrimNot: {
677      return Location::RegisterLocation(R0);
678    }
679
680    case Primitive::kPrimFloat: {
681      return Location::FpuRegisterLocation(S0);
682    }
683
684    case Primitive::kPrimLong: {
685      return Location::RegisterPairLocation(R0, R1);
686    }
687
688    case Primitive::kPrimDouble: {
689      return Location::FpuRegisterPairLocation(S0, S1);
690    }
691
692    case Primitive::kPrimVoid:
693      return Location();
694  }
695  UNREACHABLE();
696  return Location();
697}
698
// Moves a 32-bit value between any combination of core register, S register,
// and stack slot. Stack-to-stack moves go through the scratch register IP.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
732
// Moves a 64-bit value between core register pairs, FPU register pairs, and
// double stack slots. Combinations involving a single FPU register source
// are not implemented.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // Pair-to-pair halves may overlap; let the parallel move resolver
      // order the two word moves safely.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // kLoadWordPair requires an even low register (cf. ExpectedPairLayout).
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 has an odd low register, so kStoreWordPair cannot be used;
        // store the two words individually.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move the two words through the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
786
// Moves the value produced by `instruction` into `location` on behalf of
// `move_for`. Handles constant outputs, local loads, temporaries, and the
// instruction's own output location; dispatches to Move32/Move64 by width.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do if the value already lives in the requested location.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize an int or long constant directly into the destination,
    // using IP as scratch for stack destinations.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Locals live in stack slots; copy from the local's slot.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the move must immediately follow the producing
    // instruction (possibly through a temporary).
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
870
// Calls the quick runtime entry point found at `entry_point_offset` inside
// the current Thread (register TR), then records the PC of the call for
// `instruction` at `dex_pc` so stack maps can describe this call site.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  // Must immediately follow the call so the recorded PC matches it.
  RecordPcInfo(instruction, dex_pc);
  // Sanity check: only instructions that are expected to call into the
  // runtime should reach here, unless the method is known to be non-leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
884
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  // An unconditional branch needs no operands and produces no value.
  got->SetLocations(nullptr);
}
888
889void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
890  HBasicBlock* successor = got->GetSuccessor();
891  DCHECK(!successor->IsExitBlock());
892
893  HBasicBlock* block = got->GetBlock();
894  HInstruction* previous = got->GetPrevious();
895
896  HLoopInformation* info = block->GetLoopInformation();
897  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
898    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
899    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
900    return;
901  }
902
903  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
904    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
905  }
906  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
907    __ b(codegen_->GetLabelOf(successor));
908  }
909}
910
void LocationsBuilderARM::VisitExit(HExit* exit) {
  // The exit block generates no code, so it needs no locations.
  exit->SetLocations(nullptr);
}
914
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  // In debug builds, emit a breakpoint: control should never fall through
  // to the exit block at runtime.
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
922
923void LocationsBuilderARM::VisitIf(HIf* if_instr) {
924  LocationSummary* locations =
925      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
926  HInstruction* cond = if_instr->InputAt(0);
927  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
928    locations->SetInAt(0, Location::RequiresRegister());
929  }
930}
931
// Emits the conditional branch for an HIf. Three cases:
//   1. Constant condition: at most one unconditional branch.
//   2. Materialized condition: compare the boolean against 0.
//   3. Non-materialized condition: emit the compare here and branch on its
//      flags directly.
// The false-successor branch at the end is elided when it falls through.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          // Constant does not fit a CMP immediate; materialize it in the
          // scratch register IP first.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
983
984
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // An output register is only needed when the boolean is materialized;
  // otherwise the user (e.g. HIf) consumes the condition flags directly.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
994
// Materializes a comparison result as 0/1 in the output register, using a
// compare followed by a conditional move pair under an IT (if-then) block.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // Nothing to emit if the flags are consumed directly by the user.
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      // Constant does not fit a CMP immediate; materialize it in IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // it/mov/mov: out = 1 if the condition holds, else out = 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1020
// All concrete comparison nodes (==, !=, <, <=, >, >=) share the generic
// HCondition handling above; these visitors simply delegate.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1068
// Locals and local loads generate no code themselves: the code generator
// materializes them on demand via Move()/GetStackSlot().
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1085
// A store to a local requires its value (input 1) to be placed directly into
// the local's stack slot; one word for 32-bit types, two for 64-bit types.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1109
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the input was constrained to the local's stack slot.
  UNUSED(store);
}
1113
// Constants carry a constant location and emit no code of their own; their
// value is generated at each use site (see Move()).
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1157
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Constrain the returned value to the calling convention's return location
  // so nothing has to be moved at the return itself.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  // The value is already in the return location; just tear down the frame.
  codegen_->GenerateFrameExit();
}
1177
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Uses the shared invoke location setup (arguments in calling-convention
  // slots, R0 as a temp, output in the return location).
  HandleInvoke(invoke);
}
1181
// Loads the ArtMethod* of the method being compiled into `reg`; it is
// spilled at the bottom of the frame (kCurrentMethodStackOffset == 0).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1186
// Emits a static or direct call: resolves the callee through the current
// method's dex cache and calls its quick-compiled entrypoint. Recursive
// calls branch straight to this method's own frame entry.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                             kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // Recursive call: jump directly past our own frame setup.
    __ bl(codegen_->GetFrameEntryLabel());
  }

  // Record the call PC for stack maps; a method that invokes is not a leaf.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1219
1220void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1221  LocationSummary* locations =
1222      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1223  locations->AddTemp(Location::RegisterLocation(R0));
1224
1225  InvokeDexCallingConventionVisitor calling_convention_visitor;
1226  for (size_t i = 0; i < invoke->InputCount(); i++) {
1227    HInstruction* input = invoke->InputAt(i);
1228    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1229  }
1230
1231  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1232}
1233
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Same argument/temp/output setup as the other invoke flavors.
  HandleInvoke(invoke);
}
1237
// Emits a virtual call: loads the receiver's class, looks the callee up in
// the class's embedded vtable, and calls its quick-compiled entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  // Record the call PC for stack maps.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1264
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1270
// Emits an interface call: sets the hidden argument (dex method index in
// R12), loads the receiver's class, looks the callee up in the class's
// embedded IMT, and calls its quick-compiled entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // IMT slots are shared: the index is taken modulo the table size.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  // Record the call PC for stack maps.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1303
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // Output must not share registers with the input: the generator writes
      // out.hi (SBC) before it reads in.hi (SUB).
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1329
// Emits arithmetic negation. Int uses RSB; long uses an RSBS/SBC/SUB
// sequence (see inline comments); float/double use VNEG.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1376
1377void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1378  Primitive::Type result_type = conversion->GetResultType();
1379  Primitive::Type input_type = conversion->GetInputType();
1380  DCHECK_NE(result_type, input_type);
1381
1382  // The float-to-long and double-to-long type conversions rely on a
1383  // call to the runtime.
1384  LocationSummary::CallKind call_kind =
1385      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1386       && result_type == Primitive::kPrimLong)
1387      ? LocationSummary::kCall
1388      : LocationSummary::kNoCall;
1389  LocationSummary* locations =
1390      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1391
1392  switch (result_type) {
1393    case Primitive::kPrimByte:
1394      switch (input_type) {
1395        case Primitive::kPrimShort:
1396        case Primitive::kPrimInt:
1397        case Primitive::kPrimChar:
1398          // Processing a Dex `int-to-byte' instruction.
1399          locations->SetInAt(0, Location::RequiresRegister());
1400          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1401          break;
1402
1403        default:
1404          LOG(FATAL) << "Unexpected type conversion from " << input_type
1405                     << " to " << result_type;
1406      }
1407      break;
1408
1409    case Primitive::kPrimShort:
1410      switch (input_type) {
1411        case Primitive::kPrimByte:
1412        case Primitive::kPrimInt:
1413        case Primitive::kPrimChar:
1414          // Processing a Dex `int-to-short' instruction.
1415          locations->SetInAt(0, Location::RequiresRegister());
1416          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1417          break;
1418
1419        default:
1420          LOG(FATAL) << "Unexpected type conversion from " << input_type
1421                     << " to " << result_type;
1422      }
1423      break;
1424
1425    case Primitive::kPrimInt:
1426      switch (input_type) {
1427        case Primitive::kPrimLong:
1428          // Processing a Dex `long-to-int' instruction.
1429          locations->SetInAt(0, Location::Any());
1430          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1431          break;
1432
1433        case Primitive::kPrimFloat:
1434          // Processing a Dex `float-to-int' instruction.
1435          locations->SetInAt(0, Location::RequiresFpuRegister());
1436          locations->SetOut(Location::RequiresRegister());
1437          locations->AddTemp(Location::RequiresFpuRegister());
1438          break;
1439
1440        case Primitive::kPrimDouble:
1441          // Processing a Dex `double-to-int' instruction.
1442          locations->SetInAt(0, Location::RequiresFpuRegister());
1443          locations->SetOut(Location::RequiresRegister());
1444          locations->AddTemp(Location::RequiresFpuRegister());
1445          break;
1446
1447        default:
1448          LOG(FATAL) << "Unexpected type conversion from " << input_type
1449                     << " to " << result_type;
1450      }
1451      break;
1452
1453    case Primitive::kPrimLong:
1454      switch (input_type) {
1455        case Primitive::kPrimByte:
1456        case Primitive::kPrimShort:
1457        case Primitive::kPrimInt:
1458        case Primitive::kPrimChar:
1459          // Processing a Dex `int-to-long' instruction.
1460          locations->SetInAt(0, Location::RequiresRegister());
1461          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1462          break;
1463
1464        case Primitive::kPrimFloat: {
1465          // Processing a Dex `float-to-long' instruction.
1466          InvokeRuntimeCallingConvention calling_convention;
1467          locations->SetInAt(0, Location::FpuRegisterLocation(
1468              calling_convention.GetFpuRegisterAt(0)));
1469          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1470          break;
1471        }
1472
1473        case Primitive::kPrimDouble: {
1474          // Processing a Dex `double-to-long' instruction.
1475          InvokeRuntimeCallingConvention calling_convention;
1476          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1477              calling_convention.GetFpuRegisterAt(0),
1478              calling_convention.GetFpuRegisterAt(1)));
1479          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1480          break;
1481        }
1482
1483        default:
1484          LOG(FATAL) << "Unexpected type conversion from " << input_type
1485                     << " to " << result_type;
1486      }
1487      break;
1488
1489    case Primitive::kPrimChar:
1490      switch (input_type) {
1491        case Primitive::kPrimByte:
1492        case Primitive::kPrimShort:
1493        case Primitive::kPrimInt:
1494          // Processing a Dex `int-to-char' instruction.
1495          locations->SetInAt(0, Location::RequiresRegister());
1496          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1497          break;
1498
1499        default:
1500          LOG(FATAL) << "Unexpected type conversion from " << input_type
1501                     << " to " << result_type;
1502      }
1503      break;
1504
1505    case Primitive::kPrimFloat:
1506      switch (input_type) {
1507        case Primitive::kPrimByte:
1508        case Primitive::kPrimShort:
1509        case Primitive::kPrimInt:
1510        case Primitive::kPrimChar:
1511          // Processing a Dex `int-to-float' instruction.
1512          locations->SetInAt(0, Location::RequiresRegister());
1513          locations->SetOut(Location::RequiresFpuRegister());
1514          break;
1515
1516        case Primitive::kPrimLong:
1517          // Processing a Dex `long-to-float' instruction.
1518          locations->SetInAt(0, Location::RequiresRegister());
1519          locations->SetOut(Location::RequiresFpuRegister());
1520          locations->AddTemp(Location::RequiresRegister());
1521          locations->AddTemp(Location::RequiresRegister());
1522          locations->AddTemp(Location::RequiresFpuRegister());
1523          locations->AddTemp(Location::RequiresFpuRegister());
1524          break;
1525
1526        case Primitive::kPrimDouble:
1527          // Processing a Dex `double-to-float' instruction.
1528          locations->SetInAt(0, Location::RequiresFpuRegister());
1529          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1530          break;
1531
1532        default:
1533          LOG(FATAL) << "Unexpected type conversion from " << input_type
1534                     << " to " << result_type;
1535      };
1536      break;
1537
1538    case Primitive::kPrimDouble:
1539      switch (input_type) {
1540        case Primitive::kPrimByte:
1541        case Primitive::kPrimShort:
1542        case Primitive::kPrimInt:
1543        case Primitive::kPrimChar:
1544          // Processing a Dex `int-to-double' instruction.
1545          locations->SetInAt(0, Location::RequiresRegister());
1546          locations->SetOut(Location::RequiresFpuRegister());
1547          break;
1548
1549        case Primitive::kPrimLong:
1550          // Processing a Dex `long-to-double' instruction.
1551          locations->SetInAt(0, Location::RequiresRegister());
1552          locations->SetOut(Location::RequiresFpuRegister());
1553          locations->AddTemp(Location::RequiresRegister());
1554          locations->AddTemp(Location::RequiresRegister());
1555          locations->AddTemp(Location::RequiresFpuRegister());
1556          break;
1557
1558        case Primitive::kPrimFloat:
1559          // Processing a Dex `float-to-double' instruction.
1560          locations->SetInAt(0, Location::RequiresFpuRegister());
1561          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1562          break;
1563
1564        default:
1565          LOG(FATAL) << "Unexpected type conversion from " << input_type
1566                     << " to " << result_type;
1567      };
1568      break;
1569
1570    default:
1571      LOG(FATAL) << "Unexpected type conversion from " << input_type
1572                 << " to " << result_type;
1573  }
1574}
1575
// Emits code for an HTypeConversion, dispatching on the (result type,
// input type) pair. Pairs the builder never produces are compiler bugs
// and abort via LOG(FATAL).
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Signed bitfield extract of bits [7:0] into the full register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Signed bitfield extract of bits [15:0] into the full register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // The result is the low 32 bits of the long, read from wherever
          // the register allocator placed the input (pair, stack or constant).
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            // Loads only the first word of the spilled long.
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            // Truncate the constant at compile time.
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          // vcvtis leaves its result in an S register, so convert in an
          // FPU temp and then move to the core output register.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          // Same scheme as float-to-int, via the D view of the FPU temp.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          // Delegated to the pF2l quick runtime helper.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          // Delegated to the pD2l quick runtime helper.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Unsigned bitfield extract of bits [15:0] (zero extension).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          // Move to the FPU, then convert in place.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float).  This is
          // especially important for the low bits.  The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          // Move to the low S register of the output pair, then convert.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same double-based scheme as `long-to-float' above, but the
          // final conversion to float is not needed.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand.  Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1835
1836void LocationsBuilderARM::VisitAdd(HAdd* add) {
1837  LocationSummary* locations =
1838      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1839  switch (add->GetResultType()) {
1840    case Primitive::kPrimInt: {
1841      locations->SetInAt(0, Location::RequiresRegister());
1842      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1843      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1844      break;
1845    }
1846
1847    case Primitive::kPrimLong: {
1848      locations->SetInAt(0, Location::RequiresRegister());
1849      locations->SetInAt(1, Location::RequiresRegister());
1850      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1851      break;
1852    }
1853
1854    case Primitive::kPrimFloat:
1855    case Primitive::kPrimDouble: {
1856      locations->SetInAt(0, Location::RequiresFpuRegister());
1857      locations->SetInAt(1, Location::RequiresFpuRegister());
1858      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1859      break;
1860    }
1861
1862    default:
1863      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1864  }
1865}
1866
1867void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1868  LocationSummary* locations = add->GetLocations();
1869  Location out = locations->Out();
1870  Location first = locations->InAt(0);
1871  Location second = locations->InAt(1);
1872  switch (add->GetResultType()) {
1873    case Primitive::kPrimInt:
1874      if (second.IsRegister()) {
1875        __ add(out.AsRegister<Register>(),
1876               first.AsRegister<Register>(),
1877               ShifterOperand(second.AsRegister<Register>()));
1878      } else {
1879        __ AddConstant(out.AsRegister<Register>(),
1880                       first.AsRegister<Register>(),
1881                       second.GetConstant()->AsIntConstant()->GetValue());
1882      }
1883      break;
1884
1885    case Primitive::kPrimLong: {
1886      DCHECK(second.IsRegisterPair());
1887      __ adds(out.AsRegisterPairLow<Register>(),
1888              first.AsRegisterPairLow<Register>(),
1889              ShifterOperand(second.AsRegisterPairLow<Register>()));
1890      __ adc(out.AsRegisterPairHigh<Register>(),
1891             first.AsRegisterPairHigh<Register>(),
1892             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1893      break;
1894    }
1895
1896    case Primitive::kPrimFloat:
1897      __ vadds(out.AsFpuRegister<SRegister>(),
1898               first.AsFpuRegister<SRegister>(),
1899               second.AsFpuRegister<SRegister>());
1900      break;
1901
1902    case Primitive::kPrimDouble:
1903      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1904               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1905               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1906      break;
1907
1908    default:
1909      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1910  }
1911}
1912
1913void LocationsBuilderARM::VisitSub(HSub* sub) {
1914  LocationSummary* locations =
1915      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1916  switch (sub->GetResultType()) {
1917    case Primitive::kPrimInt: {
1918      locations->SetInAt(0, Location::RequiresRegister());
1919      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1920      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1921      break;
1922    }
1923
1924    case Primitive::kPrimLong: {
1925      locations->SetInAt(0, Location::RequiresRegister());
1926      locations->SetInAt(1, Location::RequiresRegister());
1927      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1928      break;
1929    }
1930    case Primitive::kPrimFloat:
1931    case Primitive::kPrimDouble: {
1932      locations->SetInAt(0, Location::RequiresFpuRegister());
1933      locations->SetInAt(1, Location::RequiresFpuRegister());
1934      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1935      break;
1936    }
1937    default:
1938      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1939  }
1940}
1941
1942void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1943  LocationSummary* locations = sub->GetLocations();
1944  Location out = locations->Out();
1945  Location first = locations->InAt(0);
1946  Location second = locations->InAt(1);
1947  switch (sub->GetResultType()) {
1948    case Primitive::kPrimInt: {
1949      if (second.IsRegister()) {
1950        __ sub(out.AsRegister<Register>(),
1951               first.AsRegister<Register>(),
1952               ShifterOperand(second.AsRegister<Register>()));
1953      } else {
1954        __ AddConstant(out.AsRegister<Register>(),
1955                       first.AsRegister<Register>(),
1956                       -second.GetConstant()->AsIntConstant()->GetValue());
1957      }
1958      break;
1959    }
1960
1961    case Primitive::kPrimLong: {
1962      DCHECK(second.IsRegisterPair());
1963      __ subs(out.AsRegisterPairLow<Register>(),
1964              first.AsRegisterPairLow<Register>(),
1965              ShifterOperand(second.AsRegisterPairLow<Register>()));
1966      __ sbc(out.AsRegisterPairHigh<Register>(),
1967             first.AsRegisterPairHigh<Register>(),
1968             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1969      break;
1970    }
1971
1972    case Primitive::kPrimFloat: {
1973      __ vsubs(out.AsFpuRegister<SRegister>(),
1974               first.AsFpuRegister<SRegister>(),
1975               second.AsFpuRegister<SRegister>());
1976      break;
1977    }
1978
1979    case Primitive::kPrimDouble: {
1980      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1981               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1982               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1983      break;
1984    }
1985
1986
1987    default:
1988      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1989  }
1990}
1991
1992void LocationsBuilderARM::VisitMul(HMul* mul) {
1993  LocationSummary* locations =
1994      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1995  switch (mul->GetResultType()) {
1996    case Primitive::kPrimInt:
1997    case Primitive::kPrimLong:  {
1998      locations->SetInAt(0, Location::RequiresRegister());
1999      locations->SetInAt(1, Location::RequiresRegister());
2000      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2001      break;
2002    }
2003
2004    case Primitive::kPrimFloat:
2005    case Primitive::kPrimDouble: {
2006      locations->SetInAt(0, Location::RequiresFpuRegister());
2007      locations->SetInAt(1, Location::RequiresFpuRegister());
2008      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2009      break;
2010    }
2011
2012    default:
2013      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2014  }
2015}
2016
2017void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
2018  LocationSummary* locations = mul->GetLocations();
2019  Location out = locations->Out();
2020  Location first = locations->InAt(0);
2021  Location second = locations->InAt(1);
2022  switch (mul->GetResultType()) {
2023    case Primitive::kPrimInt: {
2024      __ mul(out.AsRegister<Register>(),
2025             first.AsRegister<Register>(),
2026             second.AsRegister<Register>());
2027      break;
2028    }
2029    case Primitive::kPrimLong: {
2030      Register out_hi = out.AsRegisterPairHigh<Register>();
2031      Register out_lo = out.AsRegisterPairLow<Register>();
2032      Register in1_hi = first.AsRegisterPairHigh<Register>();
2033      Register in1_lo = first.AsRegisterPairLow<Register>();
2034      Register in2_hi = second.AsRegisterPairHigh<Register>();
2035      Register in2_lo = second.AsRegisterPairLow<Register>();
2036
2037      // Extra checks to protect caused by the existence of R1_R2.
2038      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
2039      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
2040      DCHECK_NE(out_hi, in1_lo);
2041      DCHECK_NE(out_hi, in2_lo);
2042
2043      // input: in1 - 64 bits, in2 - 64 bits
2044      // output: out
2045      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2046      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2047      // parts: out.lo = (in1.lo * in2.lo)[31:0]
2048
2049      // IP <- in1.lo * in2.hi
2050      __ mul(IP, in1_lo, in2_hi);
2051      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2052      __ mla(out_hi, in1_hi, in2_lo, IP);
2053      // out.lo <- (in1.lo * in2.lo)[31:0];
2054      __ umull(out_lo, IP, in1_lo, in2_lo);
2055      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2056      __ add(out_hi, out_hi, ShifterOperand(IP));
2057      break;
2058    }
2059
2060    case Primitive::kPrimFloat: {
2061      __ vmuls(out.AsFpuRegister<SRegister>(),
2062               first.AsFpuRegister<SRegister>(),
2063               second.AsFpuRegister<SRegister>());
2064      break;
2065    }
2066
2067    case Primitive::kPrimDouble: {
2068      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2069               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2070               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2071      break;
2072    }
2073
2074    default:
2075      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2076  }
2077}
2078
2079void LocationsBuilderARM::VisitDiv(HDiv* div) {
2080  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2081      ? LocationSummary::kCall
2082      : LocationSummary::kNoCall;
2083  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2084
2085  switch (div->GetResultType()) {
2086    case Primitive::kPrimInt: {
2087      locations->SetInAt(0, Location::RequiresRegister());
2088      locations->SetInAt(1, Location::RequiresRegister());
2089      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2090      break;
2091    }
2092    case Primitive::kPrimLong: {
2093      InvokeRuntimeCallingConvention calling_convention;
2094      locations->SetInAt(0, Location::RegisterPairLocation(
2095          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2096      locations->SetInAt(1, Location::RegisterPairLocation(
2097          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2098      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2099      break;
2100    }
2101    case Primitive::kPrimFloat:
2102    case Primitive::kPrimDouble: {
2103      locations->SetInAt(0, Location::RequiresFpuRegister());
2104      locations->SetInAt(1, Location::RequiresFpuRegister());
2105      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2106      break;
2107    }
2108
2109    default:
2110      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2111  }
2112}
2113
2114void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2115  LocationSummary* locations = div->GetLocations();
2116  Location out = locations->Out();
2117  Location first = locations->InAt(0);
2118  Location second = locations->InAt(1);
2119
2120  switch (div->GetResultType()) {
2121    case Primitive::kPrimInt: {
2122      __ sdiv(out.AsRegister<Register>(),
2123              first.AsRegister<Register>(),
2124              second.AsRegister<Register>());
2125      break;
2126    }
2127
2128    case Primitive::kPrimLong: {
2129      InvokeRuntimeCallingConvention calling_convention;
2130      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2131      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2132      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2133      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2134      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2135      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2136
2137      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2138      break;
2139    }
2140
2141    case Primitive::kPrimFloat: {
2142      __ vdivs(out.AsFpuRegister<SRegister>(),
2143               first.AsFpuRegister<SRegister>(),
2144               second.AsFpuRegister<SRegister>());
2145      break;
2146    }
2147
2148    case Primitive::kPrimDouble: {
2149      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2150               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2151               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2152      break;
2153    }
2154
2155    default:
2156      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2157  }
2158}
2159
2160void LocationsBuilderARM::VisitRem(HRem* rem) {
2161  Primitive::Type type = rem->GetResultType();
2162  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2163      ? LocationSummary::kNoCall
2164      : LocationSummary::kCall;
2165  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2166
2167  switch (type) {
2168    case Primitive::kPrimInt: {
2169      locations->SetInAt(0, Location::RequiresRegister());
2170      locations->SetInAt(1, Location::RequiresRegister());
2171      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2172      locations->AddTemp(Location::RequiresRegister());
2173      break;
2174    }
2175    case Primitive::kPrimLong: {
2176      InvokeRuntimeCallingConvention calling_convention;
2177      locations->SetInAt(0, Location::RegisterPairLocation(
2178          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2179      locations->SetInAt(1, Location::RegisterPairLocation(
2180          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2181      // The runtime helper puts the output in R2,R3.
2182      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2183      break;
2184    }
2185    case Primitive::kPrimFloat: {
2186      InvokeRuntimeCallingConvention calling_convention;
2187      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2188      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2189      locations->SetOut(Location::FpuRegisterLocation(S0));
2190      break;
2191    }
2192
2193    case Primitive::kPrimDouble: {
2194      InvokeRuntimeCallingConvention calling_convention;
2195      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2196          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2197      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2198          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2199      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2200      break;
2201    }
2202
2203    default:
2204      LOG(FATAL) << "Unexpected rem type " << type;
2205  }
2206}
2207
2208void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2209  LocationSummary* locations = rem->GetLocations();
2210  Location out = locations->Out();
2211  Location first = locations->InAt(0);
2212  Location second = locations->InAt(1);
2213
2214  Primitive::Type type = rem->GetResultType();
2215  switch (type) {
2216    case Primitive::kPrimInt: {
2217      Register reg1 = first.AsRegister<Register>();
2218      Register reg2 = second.AsRegister<Register>();
2219      Register temp = locations->GetTemp(0).AsRegister<Register>();
2220
2221      // temp = reg1 / reg2  (integer division)
2222      // temp = temp * reg2
2223      // dest = reg1 - temp
2224      __ sdiv(temp, reg1, reg2);
2225      __ mul(temp, temp, reg2);
2226      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2227      break;
2228    }
2229
2230    case Primitive::kPrimLong: {
2231      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2232      break;
2233    }
2234
2235    case Primitive::kPrimFloat: {
2236      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2237      break;
2238    }
2239
2240    case Primitive::kPrimDouble: {
2241      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2242      break;
2243    }
2244
2245    default:
2246      LOG(FATAL) << "Unexpected rem type " << type;
2247  }
2248}
2249
2250void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2251  LocationSummary* locations =
2252      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2253  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2254  if (instruction->HasUses()) {
2255    locations->SetOut(Location::SameAsFirstInput());
2256  }
2257}
2258
2259void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2260  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2261  codegen_->AddSlowPath(slow_path);
2262
2263  LocationSummary* locations = instruction->GetLocations();
2264  Location value = locations->InAt(0);
2265
2266  switch (instruction->GetType()) {
2267    case Primitive::kPrimInt: {
2268      if (value.IsRegister()) {
2269        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2270        __ b(slow_path->GetEntryLabel(), EQ);
2271      } else {
2272        DCHECK(value.IsConstant()) << value;
2273        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2274          __ b(slow_path->GetEntryLabel());
2275        }
2276      }
2277      break;
2278    }
2279    case Primitive::kPrimLong: {
2280      if (value.IsRegisterPair()) {
2281        __ orrs(IP,
2282                value.AsRegisterPairLow<Register>(),
2283                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2284        __ b(slow_path->GetEntryLabel(), EQ);
2285      } else {
2286        DCHECK(value.IsConstant()) << value;
2287        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2288          __ b(slow_path->GetEntryLabel());
2289        }
2290      }
2291      break;
2292    default:
2293      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2294    }
2295  }
2296}
2297
2298void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2299  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2300
2301  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2302      ? LocationSummary::kCall
2303      : LocationSummary::kNoCall;
2304  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2305
2306  switch (op->GetResultType()) {
2307    case Primitive::kPrimInt: {
2308      locations->SetInAt(0, Location::RequiresRegister());
2309      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2310      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2311      break;
2312    }
2313    case Primitive::kPrimLong: {
2314      InvokeRuntimeCallingConvention calling_convention;
2315      locations->SetInAt(0, Location::RegisterPairLocation(
2316          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2317      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2318      // The runtime helper puts the output in R0,R1.
2319      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2320      break;
2321    }
2322    default:
2323      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2324  }
2325}
2326
// Emits code shared by Shl, Shr and UShr. 32-bit shifts are emitted
// inline; 64-bit shifts call the quick runtime helpers.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): this masks `second_reg` in place, clobbering an
        // input register — confirm the register allocator tolerates the
        // shift-amount input being modified by this instruction.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift amount: mask to [0, kMaxIntShiftValue] at
        // compile time and pick the matching encoding.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // The builder pinned the operands to the calling convention.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // Loads the entrypoint from the thread register and calls it
      // directly, bypassing InvokeRuntime. NOTE(review): unlike the other
      // runtime calls in this file, no dex pc is recorded here — confirm
      // these entrypoints cannot need a stack map.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2391
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // Shl shares its location constraints with Shr and UShr.
  HandleShift(shl);
}
2395
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // Code generation for Shl is shared with Shr and UShr.
  HandleShift(shl);
}
2399
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // Shr shares its location constraints with Shl and UShr.
  HandleShift(shr);
}
2403
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // Code generation for Shr is shared with Shl and UShr.
  HandleShift(shr);
}
2407
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // UShr shares its location constraints with Shl and Shr.
  HandleShift(ushr);
}
2411
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // Code generation for UShr is shared with Shl and Shr.
  HandleShift(ushr);
}
2415
2416void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2417  LocationSummary* locations =
2418      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2419  InvokeRuntimeCallingConvention calling_convention;
2420  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2421  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2422  locations->SetOut(Location::RegisterLocation(R0));
2423}
2424
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // The allocation entrypoint takes the type index in the first argument
  // register and the current (referrer) method in the second.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2433
2434void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2435  LocationSummary* locations =
2436      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2437  InvokeRuntimeCallingConvention calling_convention;
2438  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2439  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2440  locations->SetOut(Location::RegisterLocation(R0));
2441  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2442}
2443
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // The array allocation entrypoint takes the type index in the first
  // argument register, the length in the second (set up by the locations
  // builder), and the current method in the third.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2452
2453void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2454  LocationSummary* locations =
2455      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2456  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2457  if (location.IsStackSlot()) {
2458    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2459  } else if (location.IsDoubleStackSlot()) {
2460    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2461  }
2462  locations->SetOut(location);
2463}
2464
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2469
2470void LocationsBuilderARM::VisitNot(HNot* not_) {
2471  LocationSummary* locations =
2472      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2473  locations->SetInAt(0, Location::RequiresRegister());
2474  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2475}
2476
2477void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2478  LocationSummary* locations = not_->GetLocations();
2479  Location out = locations->Out();
2480  Location in = locations->InAt(0);
2481  switch (not_->InputAt(0)->GetType()) {
2482    case Primitive::kPrimInt:
2483      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2484      break;
2485
2486    case Primitive::kPrimLong:
2487      __ mvn(out.AsRegisterPairLow<Register>(),
2488             ShifterOperand(in.AsRegisterPairLow<Register>()));
2489      __ mvn(out.AsRegisterPairHigh<Register>(),
2490             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2491      break;
2492
2493    default:
2494      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2495  }
2496}
2497
2498void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2499  LocationSummary* locations =
2500      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2501  switch (compare->InputAt(0)->GetType()) {
2502    case Primitive::kPrimLong: {
2503      locations->SetInAt(0, Location::RequiresRegister());
2504      locations->SetInAt(1, Location::RequiresRegister());
2505      // Output overlaps because it is written before doing the low comparison.
2506      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2507      break;
2508    }
2509    case Primitive::kPrimFloat:
2510    case Primitive::kPrimDouble: {
2511      locations->SetInAt(0, Location::RequiresFpuRegister());
2512      locations->SetInAt(1, Location::RequiresFpuRegister());
2513      locations->SetOut(Location::RequiresRegister());
2514      break;
2515    }
2516    default:
2517      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2518  }
2519}
2520
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  // Produces -1, 0 or 1 in `out` depending on whether left <, == or > right.
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first (signed). Only if they are equal do the
      // flags of the unsigned low-word compare below reach the shared tail.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // `out` is preloaded with 0 before the compare sets the flags consumed
      // by the shared tail below.
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // An unordered result (NaN operand) resolves according to the bias.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: the flags here come from the last compare emitted above.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2569
2570void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2571  LocationSummary* locations =
2572      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2573  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2574    locations->SetInAt(i, Location::Any());
2575  }
2576  locations->SetOut(Location::Any());
2577}
2578
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // No code is ever generated for a phi; reaching this visitor is a bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2583
2584void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2585  // TODO (ported from quick): revisit Arm barrier kinds
2586  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2587  switch (kind) {
2588    case MemBarrierKind::kAnyStore:
2589    case MemBarrierKind::kLoadAny:
2590    case MemBarrierKind::kAnyAny: {
2591      flavour = DmbOptions::ISH;
2592      break;
2593    }
2594    case MemBarrierKind::kStoreStore: {
2595      flavour = DmbOptions::ISHST;
2596      break;
2597    }
2598    default:
2599      LOG(FATAL) << "Unexpected memory barrier " << kind;
2600  }
2601  __ dmb(flavour);
2602}
2603
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  // Atomically loads a 64-bit value at addr + offset into out_lo/out_hi.
  if (offset != 0) {
    // ldrexd takes no offset, so fold it into IP. out_lo is safe to use as
    // the scratch register here because the load below overwrites it anyway.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2615
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  // Atomically stores value_lo/value_hi at addr + offset using an
  // ldrexd/strexd retry loop.
  Label fail;
  if (offset != 0) {
    // strexd takes no offset, so fold it into IP using temp1 as scratch.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 into temp1 on success; retry until it does.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2638
2639void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2640  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2641
2642  LocationSummary* locations =
2643      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2644  locations->SetInAt(0, Location::RequiresRegister());
2645  locations->SetInAt(1, Location::RequiresRegister());
2646
2647
2648  Primitive::Type field_type = field_info.GetFieldType();
2649  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2650  bool generate_volatile = field_info.IsVolatile()
2651      && is_wide
2652      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2653  // Temporary registers for the write barrier.
2654  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2655  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2656    locations->AddTemp(Location::RequiresRegister());
2657    locations->AddTemp(Location::RequiresRegister());
2658  } else if (generate_volatile) {
2659    // Arm encoding have some additional constraints for ldrexd/strexd:
2660    // - registers need to be consecutive
2661    // - the first register should be even but not R14.
2662    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2663    // enable Arm encoding.
2664    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2665
2666    locations->AddTemp(Location::RequiresRegister());
2667    locations->AddTemp(Location::RequiresRegister());
2668    if (field_type == Primitive::kPrimDouble) {
2669      // For doubles we need two more registers to copy the value.
2670      locations->AddTemp(Location::RegisterLocation(R2));
2671      locations->AddTemp(Location::RegisterLocation(R3));
2672    }
2673  }
2674}
2675
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Generates the store for an instance or static field set, including the
  // volatile barriers, the wide-atomic path and the GC write barrier.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // A volatile store must not be reordered with preceding memory accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Without atomic ldrd/strd, a volatile 64-bit store must use the
      // ldrexd/strexd loop (which records the implicit null check itself).
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Copy the FPU pair into the two extra core temps (pinned to R2/R3
        // by the locations builder), then store them atomically.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object so the written reference is traced.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // A volatile store must not be reordered with subsequent memory accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2773
2774void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2775  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2776  LocationSummary* locations =
2777      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2778  locations->SetInAt(0, Location::RequiresRegister());
2779
2780  bool volatile_for_double = field_info.IsVolatile()
2781      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2782      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2783  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
2784  locations->SetOut(Location::RequiresRegister(),
2785                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
2786  if (volatile_for_double) {
2787    // Arm encoding have some additional constraints for ldrexd/strexd:
2788    // - registers need to be consecutive
2789    // - the first register should be even but not R14.
2790    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2791    // enable Arm encoding.
2792    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2793    locations->AddTemp(Location::RequiresRegister());
2794    locations->AddTemp(Location::RequiresRegister());
2795  }
2796}
2797
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  // Generates the load for an instance or static field get, including the
  // wide-atomic path for volatile 64-bit fields and the trailing barrier.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // Without atomic ldrd/strd, a volatile 64-bit load must use ldrexd.
      if (is_volatile && !atomic_ldrd_strd) {
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into two core temps, then move into the FPU pair.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // A volatile load must not be reordered with subsequent memory accesses.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2882
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field sets share the same location constraints.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2886
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field sets share the same code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2890
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field gets share the same location constraints.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2894
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field gets share the same code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2898
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field gets share the same location constraints.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2902
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field gets share the same code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2906
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field sets share the same location constraints.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2910
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field sets share the same code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2914
2915void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2916  LocationSummary* locations =
2917      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2918  locations->SetInAt(0, Location::RequiresRegister());
2919  if (instruction->HasUses()) {
2920    locations->SetOut(Location::SameAsFirstInput());
2921  }
2922}
2923
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Skip emitting anything if a following memory access can subsume the
  // null check.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load from offset 0 of the object: a null reference faults here, and the
  // recorded PC info lets the runtime attribute the fault to this check.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2933
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Compare the reference against zero and branch to the slow path when it
  // is null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2944
2945void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2946  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2947    GenerateImplicitNullCheck(instruction);
2948  } else {
2949    GenerateExplicitNullCheck(instruction);
2950  }
2951}
2952
2953void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2954  LocationSummary* locations =
2955      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2956  locations->SetInAt(0, Location::RequiresRegister());
2957  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2958  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2959}
2960
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  // Loads an array element: a constant index is folded into the load offset;
  // a register index is scaled by the element size and added to the array
  // base in IP before loading from the data offset.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no scaling of the index is needed.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The load above may fault on a null array; record PC info for the
  // implicit null check if one was attached to this instruction.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3087
3088void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3089  Primitive::Type value_type = instruction->GetComponentType();
3090
3091  bool needs_write_barrier =
3092      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3093  bool needs_runtime_call = instruction->NeedsTypeCheck();
3094
3095  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3096      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3097  if (needs_runtime_call) {
3098    InvokeRuntimeCallingConvention calling_convention;
3099    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3100    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3101    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3102  } else {
3103    locations->SetInAt(0, Location::RequiresRegister());
3104    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3105    locations->SetInAt(2, Location::RequiresRegister());
3106
3107    if (needs_write_barrier) {
3108      // Temporary registers for the write barrier.
3109      locations->AddTemp(Location::RequiresRegister());
3110      locations->AddTemp(Location::RequiresRegister());
3111    }
3112  }
3113}
3114
// Generates code for an array element store. Primitive stores are emitted
// inline; object stores that need a runtime type check call pAputObject,
// and inline object stores are followed by a GC card mark.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        // Constant index: fold the full element offset into the store.
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Variable index: compute the element base address in IP first.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        // Scale the index by the 2-byte element size.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // The store above may double as the null check for `obj`.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Reference store: dirty the card covering `obj` so the GC sees it.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Type check required: let the runtime perform the checked store.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // kStoreWordPair stores the low register and its pair partner.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // The D register overlapping the S register pair holds the value.
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch (their store may be preceded
  // by a runtime call, so the implicit null check is recorded there).
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3238
3239void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3240  LocationSummary* locations =
3241      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3242  locations->SetInAt(0, Location::RequiresRegister());
3243  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3244}
3245
3246void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3247  LocationSummary* locations = instruction->GetLocations();
3248  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3249  Register obj = locations->InAt(0).AsRegister<Register>();
3250  Register out = locations->Out().AsRegister<Register>();
3251  __ LoadFromOffset(kLoadWord, out, obj, offset);
3252  codegen_->MaybeRecordImplicitNullCheck(instruction);
3253}
3254
3255void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3256  LocationSummary* locations =
3257      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3258  locations->SetInAt(0, Location::RequiresRegister());
3259  locations->SetInAt(1, Location::RequiresRegister());
3260  if (instruction->HasUses()) {
3261    locations->SetOut(Location::SameAsFirstInput());
3262  }
3263}
3264
// Emits an array bounds check: branches to the slow path (which throws)
// when the index is out of range.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  // CS is the unsigned >= condition, so a single compare also catches a
  // negative index (it becomes a large unsigned value).
  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}
3277
// Emits the GC write barrier: marks the card covering `object` as dirty.
// `temp` and `card` are scratch registers. The barrier is skipped when the
// stored `value` is null, since a null store creates no old-to-young edge.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card = card table base, read from the current thread (TR).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp = card index of `object` (its address shifted by the card size).
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // strb writes only the low byte of `card`; the runtime arranges the card
  // table base so that byte is the dirty-card marker value.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3286
// Temporaries carry no locations of their own; they are materialized by the
// code generator where needed.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3290
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3295
// Parallel moves are introduced after the locations-building phase, so this
// visitor should never be reached during location building.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3300
// Delegates emission of all moves in the parallel move to the resolver,
// which orders them to avoid clobbering.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3304
// A suspend check needs no inputs or outputs, only a slow path.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3308
// Emits an explicit suspend check, unless a loop back edge or the entry
// block's goto is already responsible for generating it.
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
3322
// Emits the suspend-check test: reads the thread's flags halfword and
// enters the slow path when any flag is set. With a null `successor`,
// execution resumes inline after the slow path; otherwise the slow path
// jumps back to `successor`.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Non-zero thread flags mean a suspension (or similar) request is pending.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Fall through to the successor when no flag is set; otherwise take the
    // slow path, which will branch back to the successor itself.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3341
// Returns the assembler of the owning code generator, used by the `__` macro.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3345
// Emits one move of the parallel move at `index`. Handles core and FPU
// registers, 32-bit and 64-bit stack slots, and int/long/float/double
// constants; IP serves as scratch for memory-to-memory transfers.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      // Stack-to-stack: bounce the word through IP.
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack-to-stack: move the two words separately through IP.
    DCHECK(destination.IsDoubleStackSlot()) << destination;
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
  } else {
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadImmediate(destination.AsRegister<Register>(), Low32Bits(value));
        } else {
          __ LoadImmediate(destination.AsRegister<Register>(), High32Bits(value));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      uint64_t int_value = bit_cast<uint64_t, double>(value);
      if (destination.IsFpuRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(Low32Bits(int_value)));
        } else {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(High32Bits(int_value)));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3447
// Swaps the contents of core register `reg` with the stack slot at SP+mem,
// using IP as the scratch register.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3453
// Swaps the words at stack slots SP+mem1 and SP+mem2. Uses IP plus one more
// scratch register; if that register had to be spilled on the stack, the
// slot offsets are adjusted by one word to account for the push.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3464
3465void ParallelMoveResolverARM::EmitSwap(size_t index) {
3466  MoveOperands* move = moves_.Get(index);
3467  Location source = move->GetSource();
3468  Location destination = move->GetDestination();
3469
3470  if (source.IsRegister() && destination.IsRegister()) {
3471    DCHECK_NE(source.AsRegister<Register>(), IP);
3472    DCHECK_NE(destination.AsRegister<Register>(), IP);
3473    __ Mov(IP, source.AsRegister<Register>());
3474    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3475    __ Mov(destination.AsRegister<Register>(), IP);
3476  } else if (source.IsRegister() && destination.IsStackSlot()) {
3477    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3478  } else if (source.IsStackSlot() && destination.IsRegister()) {
3479    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3480  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3481    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3482  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3483    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3484    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3485    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3486  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3487    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3488                                           : destination.AsFpuRegister<SRegister>();
3489    int mem = source.IsFpuRegister()
3490        ? destination.GetStackIndex()
3491        : source.GetStackIndex();
3492
3493    __ vmovrs(IP, reg);
3494    __ LoadFromOffset(kLoadWord, IP, SP, mem);
3495    __ StoreToOffset(kStoreWord, IP, SP, mem);
3496  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3497    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3498    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3499  } else {
3500    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
3501  }
3502}
3503
// Saves a core register on the stack so it can be used as scratch by the
// parallel move resolver.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}

// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3511
3512void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3513  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3514      ? LocationSummary::kCallOnSlowPath
3515      : LocationSummary::kNoCall;
3516  LocationSummary* locations =
3517      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3518  locations->SetOut(Location::RequiresRegister());
3519}
3520
// Generates code for an HLoadClass: either reads the current method's
// declaring class, or looks the class up in the dex cache with a slow path
// for the unresolved (null) case and an optional initialization check.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // Fast path: out = current method -> declaring class. The DCHECKs
    // guarantee no runtime call or clinit check is required here.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current method -> dex cache resolved types -> class entry.
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // A null cache entry means the class is unresolved: take the slow path.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // Also verify the class is initialized (binds the exit label itself).
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3547
3548void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3549  LocationSummary* locations =
3550      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3551  locations->SetInAt(0, Location::RequiresRegister());
3552  if (check->HasUses()) {
3553    locations->SetOut(Location::SameAsFirstInput());
3554  }
3555}
3556
// Generates a class initialization check with a slow path that performs the
// initialization if needed.
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}
3565
// Emits a check that the class in `class_reg` is initialized, branching to
// `slow_path` when its status is below kStatusInitialized. Binds the slow
// path's exit label at the end.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3576
// String loads always carry a slow path for the unresolved-string case.
void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}
3582
// Generates code for an HLoadString: walks current method -> declaring
// class -> dex cache strings -> string, taking the slow path when the
// cache entry is still null (unresolved).
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // Null entry => string not resolved yet: resolve it on the slow path.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3596
// Loading the pending exception needs only an output register.
void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
3602
// Loads the pending exception from the current thread into the output
// register, then clears the thread-local exception slot.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // Zero out the slot so the exception is consumed exactly once.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3610
// Throw is a runtime call taking the exception object in the first
// calling-convention register.
void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3617
// Delegates the throw to the pDeliverException runtime entry point.
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3622
3623void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3624  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3625      ? LocationSummary::kNoCall
3626      : LocationSummary::kCallOnSlowPath;
3627  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3628  locations->SetInAt(0, Location::RequiresRegister());
3629  locations->SetInAt(1, Location::RequiresRegister());
3630  // The out register is used as a temporary, so it overlaps with the inputs.
3631  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3632}
3633
// Generates code for instanceof: result is 0 for a null object, 1 when the
// object's class matches `cls`, and for non-final classes the mismatch case
// is resolved on a slow path.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3672
3673void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3674  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3675      instruction, LocationSummary::kCallOnSlowPath);
3676  locations->SetInAt(0, Location::RequiresRegister());
3677  locations->SetInAt(1, Location::RequiresRegister());
3678  locations->AddTemp(Location::RequiresRegister());
3679}
3680
// Generates code for checkcast: a null object passes trivially; otherwise
// a class mismatch branches to the slow path (which may throw).
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // Null objects always pass checkcast, so skip straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3701
// Monitor enter/exit is a runtime call taking the object in the first
// calling-convention register.
void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3708
// Dispatches to pLockObject or pUnlockObject depending on whether this is a
// monitor-enter or monitor-exit.
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}
3715
// All three bitwise operations share a single locations helper.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3719
3720void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3721  LocationSummary* locations =
3722      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3723  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3724         || instruction->GetResultType() == Primitive::kPrimLong);
3725  locations->SetInAt(0, Location::RequiresRegister());
3726  locations->SetInAt(1, Location::RequiresRegister());
3727  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3728}
3729
// Codegen for the bitwise operations also funnels into a shared helper,
// which dispatches on the concrete HInstruction kind.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3741
// Emits and/orr/eor for int operands, or the pairwise low/high equivalent
// for long operands held in register pairs.
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    Register first = locations->InAt(0).AsRegister<Register>();
    Register second = locations->InAt(1).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    // 64-bit case: apply the operation to the low and high words separately.
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}
3787
3788}  // namespace arm
3789}  // namespace art
3790