// code_generator_arm.cc revision a0bb2bd5b6a049ad806c223f00672d1f0210db67
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static bool ExpectedPairLayout(Location location) {
41  // We expected this for both core and fpu register pairs.
42  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
43}
44
// Stack offset at which the current method is spilled (see GenerateFrameEntry,
// which stores R0 at SP + 0).
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into quick runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-saved registers. PC is listed to mimic Quick's frame layout;
// LR is pushed in its place at entry (see GenerateFrameEntry).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };
60
// Calling convention used when invoking quick runtime entry points:
// arguments are passed in R0-R3 / S0-S3 (see the constants above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
72
// Shorthand used inside slow paths: emit through the slow path's codegen.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
// Byte offset of a quick runtime entry point inside the ARM Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
75
// Base class for ARM slow paths; holds the entry and exit labels that
// delimit the out-of-line code sequence.
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  // Label the fast path branches to in order to enter the slow path.
  Label* GetEntryLabel() { return &entry_label_; }
  // Label to branch to when resuming the fast path.
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
89
// Slow path for HNullCheck: calls the runtime to throw NullPointerException.
// No branch back to the fast path is emitted (the runtime call is expected
// to throw).
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
105
// Slow path for HDivZeroCheck: calls the runtime to throw ArithmeticException.
// No branch back to the fast path is emitted (the runtime call is expected
// to throw).
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
121
// Slow path for HSuspendCheck: calls the pTestSuspend entry point, then
// resumes either at `return_label_` or at the given successor block.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call may clobber registers; preserve all live ones.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
156
// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException with the
// offending index and the array length passed as runtime-call arguments.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
188
// Slow path that resolves (and optionally initializes) a class through the
// runtime. Shared by HLoadClass and HClinitCheck; `at_` identifies which
// instruction triggered it.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call arguments: type index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output register must not be among the saved live registers,
      // otherwise the restore below would clobber the result.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
240
// Slow path for HLoadString: resolves the string through pResolveString and
// moves the result from R0 into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be among the saved live registers,
    // otherwise the restore below would clobber the result.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime call arguments: string index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
269
270class TypeCheckSlowPathARM : public SlowPathCodeARM {
271 public:
272  TypeCheckSlowPathARM(HInstruction* instruction,
273                       Location class_to_check,
274                       Location object_class,
275                       uint32_t dex_pc)
276      : instruction_(instruction),
277        class_to_check_(class_to_check),
278        object_class_(object_class),
279        dex_pc_(dex_pc) {}
280
281  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
282    LocationSummary* locations = instruction_->GetLocations();
283    DCHECK(instruction_->IsCheckCast()
284           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
285
286    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
287    __ Bind(GetEntryLabel());
288    codegen->SaveLiveRegisters(locations);
289
290    // We're moving two locations to locations that could overlap, so we need a parallel
291    // move resolver.
292    InvokeRuntimeCallingConvention calling_convention;
293    codegen->EmitParallelMoves(
294        class_to_check_,
295        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
296        object_class_,
297        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
298
299    if (instruction_->IsInstanceOf()) {
300      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
301      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
302    } else {
303      DCHECK(instruction_->IsCheckCast());
304      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
305    }
306
307    codegen->RestoreLiveRegisters(locations);
308    __ b(GetExitLabel());
309  }
310
311 private:
312  HInstruction* const instruction_;
313  const Location class_to_check_;
314  const Location object_class_;
315  uint32_t dex_pc_;
316
317  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
318};
319
// Switch the `__` shorthand from the slow paths' codegen to the code
// generator's own assembler. (A single #undef suffices; the previous
// duplicate was redundant.)
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
324
325inline Condition ARMCondition(IfCondition cond) {
326  switch (cond) {
327    case kCondEQ: return EQ;
328    case kCondNE: return NE;
329    case kCondLT: return LT;
330    case kCondLE: return LE;
331    case kCondGT: return GT;
332    case kCondGE: return GE;
333    default:
334      LOG(FATAL) << "Unknown if condition";
335  }
336  return EQ;        // Unreachable.
337}
338
339inline Condition ARMOppositeCondition(IfCondition cond) {
340  switch (cond) {
341    case kCondEQ: return NE;
342    case kCondNE: return EQ;
343    case kCondLT: return GE;
344    case kCondLE: return GT;
345    case kCondGT: return LE;
346    case kCondGE: return LT;
347    default:
348      LOG(FATAL) << "Unknown if condition";
349  }
350  return EQ;        // Unreachable.
351}
352
// Pretty-prints core register `reg` to `stream` (used for debug dumps).
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}
356
// Pretty-prints floating-point register `reg` to `stream` (used for debug dumps).
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}
360
// Spills core register `reg_id` to SP + `stack_index`; returns the bytes used.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
365
// Reloads core register `reg_id` from SP + `stack_index`; returns the bytes used.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
370
// Spills S register `reg_id` to SP + `stack_index`; returns the bytes used.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
375
// Reloads S register `reg_id` from SP + `stack_index`; returns the bytes used.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
380
// Constructs the ARM code generator, registering the callee-save masks with
// the base class and pre-allocating the registers baseline always needs.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  AddAllocatedRegister(Location::RegisterLocation(kCoreSavedRegisterForBaseline));
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
406
// Picks a free register (or register pair) suitable for `type`, marks it as
// blocked, and keeps the pair bookkeeping consistent with the single-register
// bookkeeping.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block both halves, then refresh the pair table so no other pair
      // containing either of these registers can be handed out.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned pair of S registers (S2n/S2n+1 alias Dn,
      // see FromLowSToD).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
457
// Marks registers the register allocator must never hand out. For baseline
// compilation, callee-saved registers are additionally blocked, except the
// one reserved for long operations (kCoreSavedRegisterForBaseline).
void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }

    // Keep R5 available so baseline can still do long operations.
    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }

  // Propagate the single-register blocks to the pair table.
  UpdateBlockedPairRegisters();
}
487
488void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
489  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
490    ArmManagedRegister current =
491        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
492    if (blocked_core_registers_[current.AsRegisterPairLow()]
493        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
494      blocked_register_pairs_[i] = true;
495    }
496  }
497}
498
// Visitor that emits ARM code for each HInstruction, sharing the code
// generator's assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
503
504static uint32_t LeastSignificantBit(uint32_t mask) {
505  // ffs starts at 1.
506  return ffs(mask) - 1;
507}
508
// Computes which callee-save registers this method actually spills, based on
// the registers the allocator used.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  // The constructor always allocates PC, so the core mask cannot be empty.
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill every bit between the lowest and highest set bits so the mask
    // describes one contiguous vpush/vpop range.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
525
// Emits the method prologue: implicit stack-overflow probe, callee-save
// spills, frame allocation, and the spill of the current method at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  if (!skip_overflow_check) {
    // Implicit stack overflow check: load from the far end of the reserved
    // stack region; a fault here signals overflow.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is a contiguous range (see ComputeSpillMask), so a
    // single vpush covers it.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Allocate the rest of the frame and store R0 at SP + 0
  // (cf. kCurrentMethodStackOffset; R0 presumably holds the current method
  // on entry — TODO confirm against the invoke stubs).
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
546
// Emits the method epilogue, tearing down in the reverse order of
// GenerateFrameEntry. core_spill_mask_ includes PC, so the final pop
// also returns to the caller.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
555
// Binds `block`'s label to the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
559
560Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
561  switch (load->GetType()) {
562    case Primitive::kPrimLong:
563    case Primitive::kPrimDouble:
564      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
565      break;
566
567    case Primitive::kPrimInt:
568    case Primitive::kPrimNot:
569    case Primitive::kPrimFloat:
570      return Location::StackSlot(GetStackSlot(load->GetLocal()));
571
572    case Primitive::kPrimBoolean:
573    case Primitive::kPrimByte:
574    case Primitive::kPrimChar:
575    case Primitive::kPrimShort:
576    case Primitive::kPrimVoid:
577      LOG(FATAL) << "Unexpected type " << load->GetType();
578  }
579
580  LOG(FATAL) << "Unreachable";
581  return Location();
582}
583
// Computes the location of the next method argument under the ARM managed
// calling convention: ints in core registers then stack; longs in aligned
// core register pairs; floats/doubles in S registers with doubles aligned
// to even S registers. Updates the visitor's running indices.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      // A long consumes two argument slots whether or not it lands in registers.
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // The convention's register list must provide a contiguous pair here.
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // Once float_index_ is even we are past any back-filling slots left by
      // earlier doubles; jump it forward to the double frontier.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at the next even S register at or past the float frontier.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
658
// Returns where a callee places its result: R0 for 32-bit integrals and
// references, R0/R1 for longs, S0 for floats, S0/S1 for doubles, and an
// invalid location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
688
// Emits a 32-bit move between any combination of core register, S register
// and stack slot. IP serves as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: go through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
722
// Emits a 64-bit move between register pairs, fpu register pairs and double
// stack slots. Pair-to-pair moves go through the parallel move resolver
// since the halves may overlap.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // kLoadWordPair needs an even/contiguous destination pair.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not a kStoreWordPair-capable pair; store word by word.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move the two words via the resolver's scratch handling.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
776
// Moves the value produced by `instruction` into `location`, handling
// constants, local-variable loads, temporaries, and the general case where
// the value sits in the instruction's output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do if the value is already where it is wanted.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize an int or long constant directly into the destination.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Locals live in stack slots; pick the move width from the type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the value must still be in the instruction's output
    // location, which is only guaranteed right after it executes.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
860
// Calls the runtime entry point stored at `entry_point_offset` in the current
// Thread (TR) and records a stack map for `dex_pc`. Clobbers LR.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  // Load the entry point address from the Thread object and call through LR.
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only instructions known to be able to call into the runtime should land here.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
874
// A goto has no operands and produces no value: no LocationSummary needed.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
878
879void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
880  HBasicBlock* successor = got->GetSuccessor();
881  DCHECK(!successor->IsExitBlock());
882
883  HBasicBlock* block = got->GetBlock();
884  HInstruction* previous = got->GetPrevious();
885
886  HLoopInformation* info = block->GetLoopInformation();
887  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
888    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
889    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
890    return;
891  }
892
893  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
894    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
895  }
896  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
897    __ b(codegen_->GetLabelOf(successor));
898  }
899}
900
// The exit block consumes nothing: no LocationSummary needed.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
904
905void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
906  UNUSED(exit);
907  if (kIsDebugBuild) {
908    __ Comment("Unreachable");
909    __ bkpt(0);
910  }
911}
912
913void LocationsBuilderARM::VisitIf(HIf* if_instr) {
914  LocationSummary* locations =
915      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
916  HInstruction* cond = if_instr->InputAt(0);
917  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
918    locations->SetInAt(0, Location::RequiresRegister());
919  }
920}
921
// Emits the branches for an HIf. Three cases are handled: a statically known
// constant condition, a materialized condition re-tested against zero, and a
// non-materialized condition whose compare is emitted right here.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      // Always true: branch to the true successor unless it is the fallthrough.
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      // Always false: fall through to the false-successor handling below.
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Encode the constant as an immediate operand when possible;
        // otherwise load it into the IP scratch register first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is the fallthrough block.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
973
974
// A condition takes its left operand in a register and its right operand as a
// register or a constant; an output register is only needed when the condition
// must be materialized as a 0/1 value.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
984
// Materializes `comp` as 0 or 1 in its output register. Emits nothing when
// the condition's only user consumes the flags directly.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    // Encode the constant as an immediate operand when possible; otherwise
    // load it into the IP scratch register first.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // IT block (required for conditional execution in Thumb-2): move 1 into the
  // output when the condition holds, 0 otherwise.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
1010
// All concrete comparison instructions share HCondition's location setup and
// code generation; each visitor simply delegates to VisitCondition.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1058
// Locals live in fixed stack slots; the HLocal/HLoadLocal instructions
// themselves emit no code — the code generator drives the actual moves.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1075
1076void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1077  LocationSummary* locations =
1078      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1079  switch (store->InputAt(1)->GetType()) {
1080    case Primitive::kPrimBoolean:
1081    case Primitive::kPrimByte:
1082    case Primitive::kPrimChar:
1083    case Primitive::kPrimShort:
1084    case Primitive::kPrimInt:
1085    case Primitive::kPrimNot:
1086    case Primitive::kPrimFloat:
1087      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1088      break;
1089
1090    case Primitive::kPrimLong:
1091    case Primitive::kPrimDouble:
1092      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1093      break;
1094
1095    default:
1096      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1097  }
1098}
1099
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the value was constrained to the local's stack slot.
  UNUSED(store);
}

// Constants are not materialized here; each use site generates the value.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1147
// Returns only need the frame exit; a value-returning HReturn additionally
// constrains its input to the calling convention's return location, so no
// extra move is needed here.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1167
// Static/direct invokes use the common invoke location setup.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  HandleInvoke(invoke);
}

// Loads the ArtMethod* of the method being compiled; it is spilled at SP + 0
// (kCurrentMethodStackOffset) by the frame entry.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1175
// Emits a static/direct call: resolves the callee through the current
// method's dex cache, then calls its quick-compiled entry point.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                         kArmWordSize).Int32Value());
  // Call the resolved method through LR.
  __ blx(LR);

  // Record the return PC so the runtime can map it back to the dex instruction.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1204
1205void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1206  LocationSummary* locations =
1207      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1208  locations->AddTemp(Location::RegisterLocation(R0));
1209
1210  InvokeDexCallingConventionVisitor calling_convention_visitor;
1211  for (size_t i = 0; i < invoke->InputCount(); i++) {
1212    HInstruction* input = invoke->InputAt(i);
1213    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1214  }
1215
1216  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1217}
1218
// Virtual invokes use the common invoke location setup.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1222
// Emits a virtual call: loads the receiver's class, indexes its embedded
// vtable, and calls the method's quick-compiled entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // Call the resolved method through LR.
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1249
// Interface invokes use the common setup plus a temp (R12) for the hidden
// argument (the interface method's dex method index, see the code generator).
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1255
// Emits an interface call: loads the receiver's class, indexes its embedded
// IMT (modulo kImtSize), and calls the entry's quick-compiled code. The dex
// method index is passed as a hidden argument in R12.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // Call the resolved method through LR.
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1288
1289void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1290  LocationSummary* locations =
1291      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1292  switch (neg->GetResultType()) {
1293    case Primitive::kPrimInt:
1294    case Primitive::kPrimLong: {
1295      Location::OutputOverlap output_overlaps = (neg->GetResultType() == Primitive::kPrimLong)
1296          ? Location::kOutputOverlap
1297          : Location::kNoOutputOverlap;
1298      locations->SetInAt(0, Location::RequiresRegister());
1299      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1300      break;
1301    }
1302
1303    case Primitive::kPrimFloat:
1304    case Primitive::kPrimDouble:
1305      locations->SetInAt(0, Location::RequiresFpuRegister());
1306      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1307      break;
1308
1309    default:
1310      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1311  }
1312}
1313
// Emits arithmetic negation. Integers use RSB; longs use an RSBS/SBC/SUB
// sequence (Thumb-2 has no RSC); floats/doubles use VNEG.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in.
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1360
// Builds locations for a primitive type conversion. Most conversions are
// register-to-register; float/double-to-long is done through a runtime call
// and therefore uses the runtime calling convention for its operand/result.
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // The FPU temp holds the intermediate VCVT result.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          // Runtime call: argument and result use the runtime convention.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // The temps hold the intermediate values of the conversion sequence.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1559
1560void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1561  LocationSummary* locations = conversion->GetLocations();
1562  Location out = locations->Out();
1563  Location in = locations->InAt(0);
1564  Primitive::Type result_type = conversion->GetResultType();
1565  Primitive::Type input_type = conversion->GetInputType();
1566  DCHECK_NE(result_type, input_type);
1567  switch (result_type) {
1568    case Primitive::kPrimByte:
1569      switch (input_type) {
1570        case Primitive::kPrimShort:
1571        case Primitive::kPrimInt:
1572        case Primitive::kPrimChar:
1573          // Processing a Dex `int-to-byte' instruction.
1574          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1575          break;
1576
1577        default:
1578          LOG(FATAL) << "Unexpected type conversion from " << input_type
1579                     << " to " << result_type;
1580      }
1581      break;
1582
1583    case Primitive::kPrimShort:
1584      switch (input_type) {
1585        case Primitive::kPrimByte:
1586        case Primitive::kPrimInt:
1587        case Primitive::kPrimChar:
1588          // Processing a Dex `int-to-short' instruction.
1589          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1590          break;
1591
1592        default:
1593          LOG(FATAL) << "Unexpected type conversion from " << input_type
1594                     << " to " << result_type;
1595      }
1596      break;
1597
1598    case Primitive::kPrimInt:
1599      switch (input_type) {
1600        case Primitive::kPrimLong:
1601          // Processing a Dex `long-to-int' instruction.
1602          DCHECK(out.IsRegister());
1603          if (in.IsRegisterPair()) {
1604            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1605          } else if (in.IsDoubleStackSlot()) {
1606            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1607          } else {
1608            DCHECK(in.IsConstant());
1609            DCHECK(in.GetConstant()->IsLongConstant());
1610            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1611            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1612          }
1613          break;
1614
1615        case Primitive::kPrimFloat: {
1616          // Processing a Dex `float-to-int' instruction.
1617          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1618          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1619          __ vcvtis(temp, temp);
1620          __ vmovrs(out.AsRegister<Register>(), temp);
1621          break;
1622        }
1623
1624        case Primitive::kPrimDouble: {
1625          // Processing a Dex `double-to-int' instruction.
1626          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1627          DRegister temp_d = FromLowSToD(temp_s);
1628          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1629          __ vcvtid(temp_s, temp_d);
1630          __ vmovrs(out.AsRegister<Register>(), temp_s);
1631          break;
1632        }
1633
1634        default:
1635          LOG(FATAL) << "Unexpected type conversion from " << input_type
1636                     << " to " << result_type;
1637      }
1638      break;
1639
1640    case Primitive::kPrimLong:
1641      switch (input_type) {
1642        case Primitive::kPrimByte:
1643        case Primitive::kPrimShort:
1644        case Primitive::kPrimInt:
1645        case Primitive::kPrimChar:
1646          // Processing a Dex `int-to-long' instruction.
1647          DCHECK(out.IsRegisterPair());
1648          DCHECK(in.IsRegister());
1649          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1650          // Sign extension.
1651          __ Asr(out.AsRegisterPairHigh<Register>(),
1652                 out.AsRegisterPairLow<Register>(),
1653                 31);
1654          break;
1655
1656        case Primitive::kPrimFloat:
1657          // Processing a Dex `float-to-long' instruction.
1658          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1659                                  conversion,
1660                                  conversion->GetDexPc());
1661          break;
1662
1663        case Primitive::kPrimDouble:
1664          // Processing a Dex `double-to-long' instruction.
1665          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1666                                  conversion,
1667                                  conversion->GetDexPc());
1668          break;
1669
1670        default:
1671          LOG(FATAL) << "Unexpected type conversion from " << input_type
1672                     << " to " << result_type;
1673      }
1674      break;
1675
1676    case Primitive::kPrimChar:
1677      switch (input_type) {
1678        case Primitive::kPrimByte:
1679        case Primitive::kPrimShort:
1680        case Primitive::kPrimInt:
1681          // Processing a Dex `int-to-char' instruction.
1682          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1683          break;
1684
1685        default:
1686          LOG(FATAL) << "Unexpected type conversion from " << input_type
1687                     << " to " << result_type;
1688      }
1689      break;
1690
1691    case Primitive::kPrimFloat:
1692      switch (input_type) {
1693        case Primitive::kPrimByte:
1694        case Primitive::kPrimShort:
1695        case Primitive::kPrimInt:
1696        case Primitive::kPrimChar: {
1697          // Processing a Dex `int-to-float' instruction.
1698          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1699          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1700          break;
1701        }
1702
1703        case Primitive::kPrimLong: {
1704          // Processing a Dex `long-to-float' instruction.
1705          Register low = in.AsRegisterPairLow<Register>();
1706          Register high = in.AsRegisterPairHigh<Register>();
1707          SRegister output = out.AsFpuRegister<SRegister>();
1708          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1709          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1710          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1711          DRegister temp1_d = FromLowSToD(temp1_s);
1712          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1713          DRegister temp2_d = FromLowSToD(temp2_s);
1714
1715          // Operations use doubles for precision reasons (each 32-bit
1716          // half of a long fits in the 53-bit mantissa of a double,
1717          // but not in the 24-bit mantissa of a float).  This is
1718          // especially important for the low bits.  The result is
1719          // eventually converted to float.
1720
1721          // temp1_d = int-to-double(high)
1722          __ vmovsr(temp1_s, high);
1723          __ vcvtdi(temp1_d, temp1_s);
1724          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1725          // as an immediate value into `temp2_d` does not work, as
1726          // this instruction only transfers 8 significant bits of its
1727          // immediate operand.  Instead, use two 32-bit core
1728          // registers to load `k2Pow32EncodingForDouble` into
1729          // `temp2_d`.
1730          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1731          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1732          __ vmovdrr(temp2_d, constant_low, constant_high);
1733          // temp1_d = temp1_d * 2^32
1734          __ vmuld(temp1_d, temp1_d, temp2_d);
1735          // temp2_d = unsigned-to-double(low)
1736          __ vmovsr(temp2_s, low);
1737          __ vcvtdu(temp2_d, temp2_s);
1738          // temp1_d = temp1_d + temp2_d
1739          __ vaddd(temp1_d, temp1_d, temp2_d);
1740          // output = double-to-float(temp1_d);
1741          __ vcvtsd(output, temp1_d);
1742          break;
1743        }
1744
1745        case Primitive::kPrimDouble:
1746          // Processing a Dex `double-to-float' instruction.
1747          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1748                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1749          break;
1750
1751        default:
1752          LOG(FATAL) << "Unexpected type conversion from " << input_type
1753                     << " to " << result_type;
1754      };
1755      break;
1756
1757    case Primitive::kPrimDouble:
1758      switch (input_type) {
1759        case Primitive::kPrimByte:
1760        case Primitive::kPrimShort:
1761        case Primitive::kPrimInt:
1762        case Primitive::kPrimChar: {
1763          // Processing a Dex `int-to-double' instruction.
1764          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1765          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1766                    out.AsFpuRegisterPairLow<SRegister>());
1767          break;
1768        }
1769
1770        case Primitive::kPrimLong: {
1771          // Processing a Dex `long-to-double' instruction.
1772          Register low = in.AsRegisterPairLow<Register>();
1773          Register high = in.AsRegisterPairHigh<Register>();
1774          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1775          DRegister out_d = FromLowSToD(out_s);
1776          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1777          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1778          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1779          DRegister temp_d = FromLowSToD(temp_s);
1780
1781          // out_d = int-to-double(high)
1782          __ vmovsr(out_s, high);
1783          __ vcvtdi(out_d, out_s);
1784          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1785          // as an immediate value into `temp_d` does not work, as
1786          // this instruction only transfers 8 significant bits of its
1787          // immediate operand.  Instead, use two 32-bit core
1788          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1789          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1790          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1791          __ vmovdrr(temp_d, constant_low, constant_high);
1792          // out_d = out_d * 2^32
1793          __ vmuld(out_d, out_d, temp_d);
1794          // temp_d = unsigned-to-double(low)
1795          __ vmovsr(temp_s, low);
1796          __ vcvtdu(temp_d, temp_s);
1797          // out_d = out_d + temp_d
1798          __ vaddd(out_d, out_d, temp_d);
1799          break;
1800        }
1801
1802        case Primitive::kPrimFloat:
1803          // Processing a Dex `float-to-double' instruction.
1804          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1805                    in.AsFpuRegister<SRegister>());
1806          break;
1807
1808        default:
1809          LOG(FATAL) << "Unexpected type conversion from " << input_type
1810                     << " to " << result_type;
1811      };
1812      break;
1813
1814    default:
1815      LOG(FATAL) << "Unexpected type conversion from " << input_type
1816                 << " to " << result_type;
1817  }
1818}
1819
1820void LocationsBuilderARM::VisitAdd(HAdd* add) {
1821  LocationSummary* locations =
1822      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1823  switch (add->GetResultType()) {
1824    case Primitive::kPrimInt: {
1825      locations->SetInAt(0, Location::RequiresRegister());
1826      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1827      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1828      break;
1829    }
1830
1831    case Primitive::kPrimLong: {
1832      locations->SetInAt(0, Location::RequiresRegister());
1833      locations->SetInAt(1, Location::RequiresRegister());
1834      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1835      break;
1836    }
1837
1838    case Primitive::kPrimFloat:
1839    case Primitive::kPrimDouble: {
1840      locations->SetInAt(0, Location::RequiresFpuRegister());
1841      locations->SetInAt(1, Location::RequiresFpuRegister());
1842      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1843      break;
1844    }
1845
1846    default:
1847      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1848  }
1849}
1850
1851void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1852  LocationSummary* locations = add->GetLocations();
1853  Location out = locations->Out();
1854  Location first = locations->InAt(0);
1855  Location second = locations->InAt(1);
1856  switch (add->GetResultType()) {
1857    case Primitive::kPrimInt:
1858      if (second.IsRegister()) {
1859        __ add(out.AsRegister<Register>(),
1860               first.AsRegister<Register>(),
1861               ShifterOperand(second.AsRegister<Register>()));
1862      } else {
1863        __ AddConstant(out.AsRegister<Register>(),
1864                       first.AsRegister<Register>(),
1865                       second.GetConstant()->AsIntConstant()->GetValue());
1866      }
1867      break;
1868
1869    case Primitive::kPrimLong: {
1870      DCHECK(second.IsRegisterPair());
1871      __ adds(out.AsRegisterPairLow<Register>(),
1872              first.AsRegisterPairLow<Register>(),
1873              ShifterOperand(second.AsRegisterPairLow<Register>()));
1874      __ adc(out.AsRegisterPairHigh<Register>(),
1875             first.AsRegisterPairHigh<Register>(),
1876             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1877      break;
1878    }
1879
1880    case Primitive::kPrimFloat:
1881      __ vadds(out.AsFpuRegister<SRegister>(),
1882               first.AsFpuRegister<SRegister>(),
1883               second.AsFpuRegister<SRegister>());
1884      break;
1885
1886    case Primitive::kPrimDouble:
1887      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1888               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1889               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1890      break;
1891
1892    default:
1893      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1894  }
1895}
1896
1897void LocationsBuilderARM::VisitSub(HSub* sub) {
1898  LocationSummary* locations =
1899      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1900  switch (sub->GetResultType()) {
1901    case Primitive::kPrimInt: {
1902      locations->SetInAt(0, Location::RequiresRegister());
1903      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1904      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1905      break;
1906    }
1907
1908    case Primitive::kPrimLong: {
1909      locations->SetInAt(0, Location::RequiresRegister());
1910      locations->SetInAt(1, Location::RequiresRegister());
1911      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1912      break;
1913    }
1914    case Primitive::kPrimFloat:
1915    case Primitive::kPrimDouble: {
1916      locations->SetInAt(0, Location::RequiresFpuRegister());
1917      locations->SetInAt(1, Location::RequiresFpuRegister());
1918      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1919      break;
1920    }
1921    default:
1922      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1923  }
1924}
1925
1926void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1927  LocationSummary* locations = sub->GetLocations();
1928  Location out = locations->Out();
1929  Location first = locations->InAt(0);
1930  Location second = locations->InAt(1);
1931  switch (sub->GetResultType()) {
1932    case Primitive::kPrimInt: {
1933      if (second.IsRegister()) {
1934        __ sub(out.AsRegister<Register>(),
1935               first.AsRegister<Register>(),
1936               ShifterOperand(second.AsRegister<Register>()));
1937      } else {
1938        __ AddConstant(out.AsRegister<Register>(),
1939                       first.AsRegister<Register>(),
1940                       -second.GetConstant()->AsIntConstant()->GetValue());
1941      }
1942      break;
1943    }
1944
1945    case Primitive::kPrimLong: {
1946      DCHECK(second.IsRegisterPair());
1947      __ subs(out.AsRegisterPairLow<Register>(),
1948              first.AsRegisterPairLow<Register>(),
1949              ShifterOperand(second.AsRegisterPairLow<Register>()));
1950      __ sbc(out.AsRegisterPairHigh<Register>(),
1951             first.AsRegisterPairHigh<Register>(),
1952             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1953      break;
1954    }
1955
1956    case Primitive::kPrimFloat: {
1957      __ vsubs(out.AsFpuRegister<SRegister>(),
1958               first.AsFpuRegister<SRegister>(),
1959               second.AsFpuRegister<SRegister>());
1960      break;
1961    }
1962
1963    case Primitive::kPrimDouble: {
1964      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1965               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1966               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1967      break;
1968    }
1969
1970
1971    default:
1972      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1973  }
1974}
1975
1976void LocationsBuilderARM::VisitMul(HMul* mul) {
1977  LocationSummary* locations =
1978      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1979  switch (mul->GetResultType()) {
1980    case Primitive::kPrimInt:
1981    case Primitive::kPrimLong:  {
1982      locations->SetInAt(0, Location::RequiresRegister());
1983      locations->SetInAt(1, Location::RequiresRegister());
1984      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1985      break;
1986    }
1987
1988    case Primitive::kPrimFloat:
1989    case Primitive::kPrimDouble: {
1990      locations->SetInAt(0, Location::RequiresFpuRegister());
1991      locations->SetInAt(1, Location::RequiresFpuRegister());
1992      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1993      break;
1994    }
1995
1996    default:
1997      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1998  }
1999}
2000
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  // Emits code for HMul: a single `mul` for int, a three-instruction
  // cross-product sequence (with IP as scratch) for long, and VFP
  // multiplies for float/double.
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      // out_hi is written by `mla` before in1_lo/in2_lo are last read
      // by `umull`, so out_hi must not alias either of them.
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // IP is reused here for the high 32 bits of the unsigned product.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2062
2063void LocationsBuilderARM::VisitDiv(HDiv* div) {
2064  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2065      ? LocationSummary::kCall
2066      : LocationSummary::kNoCall;
2067  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2068
2069  switch (div->GetResultType()) {
2070    case Primitive::kPrimInt: {
2071      locations->SetInAt(0, Location::RequiresRegister());
2072      locations->SetInAt(1, Location::RequiresRegister());
2073      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2074      break;
2075    }
2076    case Primitive::kPrimLong: {
2077      InvokeRuntimeCallingConvention calling_convention;
2078      locations->SetInAt(0, Location::RegisterPairLocation(
2079          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2080      locations->SetInAt(1, Location::RegisterPairLocation(
2081          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2082      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2083      break;
2084    }
2085    case Primitive::kPrimFloat:
2086    case Primitive::kPrimDouble: {
2087      locations->SetInAt(0, Location::RequiresFpuRegister());
2088      locations->SetInAt(1, Location::RequiresFpuRegister());
2089      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2090      break;
2091    }
2092
2093    default:
2094      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2095  }
2096}
2097
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  // Emits code for HDiv: the sdiv instruction for int, a runtime call for
  // long, and VFP divides for float/double.
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      __ sdiv(out.AsRegister<Register>(),
              first.AsRegister<Register>(),
              second.AsRegister<Register>());
      break;
    }

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      // The operands were fixed to the runtime calling convention registers
      // in LocationsBuilderARM::VisitDiv; these checks only verify that.
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
2143
2144void LocationsBuilderARM::VisitRem(HRem* rem) {
2145  Primitive::Type type = rem->GetResultType();
2146  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2147      ? LocationSummary::kNoCall
2148      : LocationSummary::kCall;
2149  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2150
2151  switch (type) {
2152    case Primitive::kPrimInt: {
2153      locations->SetInAt(0, Location::RequiresRegister());
2154      locations->SetInAt(1, Location::RequiresRegister());
2155      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2156      locations->AddTemp(Location::RequiresRegister());
2157      break;
2158    }
2159    case Primitive::kPrimLong: {
2160      InvokeRuntimeCallingConvention calling_convention;
2161      locations->SetInAt(0, Location::RegisterPairLocation(
2162          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2163      locations->SetInAt(1, Location::RegisterPairLocation(
2164          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2165      // The runtime helper puts the output in R2,R3.
2166      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2167      break;
2168    }
2169    case Primitive::kPrimFloat: {
2170      InvokeRuntimeCallingConvention calling_convention;
2171      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2172      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2173      locations->SetOut(Location::FpuRegisterLocation(S0));
2174      break;
2175    }
2176
2177    case Primitive::kPrimDouble: {
2178      InvokeRuntimeCallingConvention calling_convention;
2179      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2180          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2181      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2182          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2183      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2184      break;
2185    }
2186
2187    default:
2188      LOG(FATAL) << "Unexpected rem type " << type;
2189  }
2190}
2191
2192void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2193  LocationSummary* locations = rem->GetLocations();
2194  Location out = locations->Out();
2195  Location first = locations->InAt(0);
2196  Location second = locations->InAt(1);
2197
2198  Primitive::Type type = rem->GetResultType();
2199  switch (type) {
2200    case Primitive::kPrimInt: {
2201      Register reg1 = first.AsRegister<Register>();
2202      Register reg2 = second.AsRegister<Register>();
2203      Register temp = locations->GetTemp(0).AsRegister<Register>();
2204
2205      // temp = reg1 / reg2  (integer division)
2206      // temp = temp * reg2
2207      // dest = reg1 - temp
2208      __ sdiv(temp, reg1, reg2);
2209      __ mul(temp, temp, reg2);
2210      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2211      break;
2212    }
2213
2214    case Primitive::kPrimLong: {
2215      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2216      break;
2217    }
2218
2219    case Primitive::kPrimFloat: {
2220      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2221      break;
2222    }
2223
2224    case Primitive::kPrimDouble: {
2225      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2226      break;
2227    }
2228
2229    default:
2230      LOG(FATAL) << "Unexpected rem type " << type;
2231  }
2232}
2233
2234void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2235  LocationSummary* locations =
2236      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2237  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2238  if (instruction->HasUses()) {
2239    locations->SetOut(Location::SameAsFirstInput());
2240  }
2241}
2242
2243void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2244  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2245  codegen_->AddSlowPath(slow_path);
2246
2247  LocationSummary* locations = instruction->GetLocations();
2248  Location value = locations->InAt(0);
2249
2250  switch (instruction->GetType()) {
2251    case Primitive::kPrimInt: {
2252      if (value.IsRegister()) {
2253        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2254        __ b(slow_path->GetEntryLabel(), EQ);
2255      } else {
2256        DCHECK(value.IsConstant()) << value;
2257        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2258          __ b(slow_path->GetEntryLabel());
2259        }
2260      }
2261      break;
2262    }
2263    case Primitive::kPrimLong: {
2264      if (value.IsRegisterPair()) {
2265        __ orrs(IP,
2266                value.AsRegisterPairLow<Register>(),
2267                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2268        __ b(slow_path->GetEntryLabel(), EQ);
2269      } else {
2270        DCHECK(value.IsConstant()) << value;
2271        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2272          __ b(slow_path->GetEntryLabel());
2273        }
2274      }
2275      break;
2276    default:
2277      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2278    }
2279  }
2280}
2281
2282void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2283  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2284
2285  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2286      ? LocationSummary::kCall
2287      : LocationSummary::kNoCall;
2288  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2289
2290  switch (op->GetResultType()) {
2291    case Primitive::kPrimInt: {
2292      locations->SetInAt(0, Location::RequiresRegister());
2293      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2294      locations->SetOut(Location::RequiresRegister());
2295      break;
2296    }
2297    case Primitive::kPrimLong: {
2298      InvokeRuntimeCallingConvention calling_convention;
2299      locations->SetInAt(0, Location::RegisterPairLocation(
2300          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2301      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2302      // The runtime helper puts the output in R0,R1.
2303      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2304      break;
2305    }
2306    default:
2307      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2308  }
2309}
2310
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  // Common code generation for HShl, HShr and HUShr: int shifts are
  // emitted inline; long shifts call a runtime helper.
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE: masks the count in place, clobbering `second_reg`.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        // Constant shift count: mask it at compile time.
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      InvokeRuntimeCallingConvention calling_convention;
      // Operands were pinned to the runtime calling convention registers in
      // LocationsBuilderARM::HandleShift; these checks only verify that.
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // Calls the entry point directly (load from the thread register, then
      // blx) rather than going through codegen_->InvokeRuntime.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2375
// Shl shares its location setup with Shr/UShr via HandleShift.
void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2379
// Shl code generation is shared with Shr/UShr via HandleShift.
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2383
// Shr shares its location setup with Shl/UShr via HandleShift.
void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2387
// Shr code generation is shared with Shl/UShr via HandleShift.
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2391
// UShr shares its location setup with Shl/Shr via HandleShift.
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2395
// UShr code generation is shared with Shl/Shr via HandleShift.
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2399
2400void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2401  LocationSummary* locations =
2402      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2403  InvokeRuntimeCallingConvention calling_convention;
2404  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2405  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2406  locations->SetOut(Location::RegisterLocation(R0));
2407}
2408
// Emits the runtime call allocating a new object: type index in the first
// argument register, current method in the second.
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
}
2416
2417void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2418  LocationSummary* locations =
2419      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2420  InvokeRuntimeCallingConvention calling_convention;
2421  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2422  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2423  locations->SetOut(Location::RegisterLocation(R0));
2424  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2425}
2426
// Emits the runtime call allocating a new array: type index in the first
// argument register, length already in the second (see locations), current
// method in the third.
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
}
2434
2435void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2436  LocationSummary* locations =
2437      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2438  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2439  if (location.IsStackSlot()) {
2440    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2441  } else if (location.IsDoubleStackSlot()) {
2442    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2443  }
2444  locations->SetOut(location);
2445}
2446
// Parameters generate no code: the value already sits where the locations
// builder placed it (register or caller-frame stack slot).
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2451
2452void LocationsBuilderARM::VisitNot(HNot* not_) {
2453  LocationSummary* locations =
2454      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2455  locations->SetInAt(0, Location::RequiresRegister());
2456  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2457}
2458
// Emits bitwise not: a single MVN for ints, one MVN per half for longs.
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      // The two halves are independent, so invert each separately.
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2479
2480void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2481  LocationSummary* locations =
2482      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2483  switch (compare->InputAt(0)->GetType()) {
2484    case Primitive::kPrimLong: {
2485      locations->SetInAt(0, Location::RequiresRegister());
2486      locations->SetInAt(1, Location::RequiresRegister());
2487      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2488      break;
2489    }
2490    case Primitive::kPrimFloat:
2491    case Primitive::kPrimDouble: {
2492      locations->SetInAt(0, Location::RequiresFpuRegister());
2493      locations->SetInAt(1, Location::RequiresFpuRegister());
2494      locations->SetOut(Location::RequiresRegister());
2495      break;
2496    }
2497    default:
2498      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2499  }
2500}
2501
// Materializes the three-way compare result (-1, 0 or 1) in `out`.
// Each case sets the condition flags, then falls through to a shared tail
// that branches on EQ/CC and binds the greater/less targets.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first; only if they are equal do the low
      // words (compared unsigned) decide the result.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN operands: the gt-bias flag decides which direction wins.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: `out` is already 0; EQ keeps it, otherwise pick -1 or 1.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2550
2551void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2552  LocationSummary* locations =
2553      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2554  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2555    locations->SetInAt(i, Location::Any());
2556  }
2557  locations->SetOut(Location::Any());
2558}
2559
// Phis never generate code directly; reaching this visitor is a compiler bug.
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2564
2565void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2566  // TODO (ported from quick): revisit Arm barrier kinds
2567  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
2568  switch (kind) {
2569    case MemBarrierKind::kAnyStore:
2570    case MemBarrierKind::kLoadAny:
2571    case MemBarrierKind::kAnyAny: {
2572      flavour = DmbOptions::ISH;
2573      break;
2574    }
2575    case MemBarrierKind::kStoreStore: {
2576      flavour = DmbOptions::ISHST;
2577      break;
2578    }
2579    default:
2580      LOG(FATAL) << "Unexpected memory barrier " << kind;
2581  }
2582  __ dmb(flavour);
2583}
2584
// Emits an atomic 64-bit load via ldrexd into `out_lo`/`out_hi`.
// `out_lo` doubles as a scratch register for the address computation; it is
// overwritten by the load afterwards, so this is safe.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    // ldrexd takes no immediate offset: materialize addr + offset in IP.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2596
// Emits an atomic 64-bit store as an ldrexd/strexd retry loop: the store is
// repeated until the exclusive monitor reports success. `temp1`/`temp2` are
// clobbered. The implicit null check is recorded on the ldrexd, the first
// instruction that dereferences the address.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    // strexd/ldrexd take no immediate offset: materialize addr + offset in IP.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 to temp1 on success; otherwise loop and retry.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2619
// Builds the location summary for an instance/static field store.
// The AddTemp order matters: the codegen side indexes temps as GetTemp(0..3).
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  // A wide volatile store without single-copy-atomic ldrd/strd is emitted as an
  // ldrexd/strexd loop (GenerateWideAtomicStore), which needs extra temps.
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2656
// Emits code for an instance/static field store: picks the store width from
// the field type, surrounds volatile stores with barriers, falls back to an
// exclusive-store loop for wide volatiles without atomic strd, and marks the
// GC card for reference stores.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Order the volatile store after all preceding accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic strd: use the ldrexd/strexd loop.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the FP value to core registers, then store with the
        // ldrexd/strexd loop (exclusive stores need core registers).
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // Order subsequent accesses after the volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2754
2755void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2756  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2757  LocationSummary* locations =
2758      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2759  locations->SetInAt(0, Location::RequiresRegister());
2760  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2761
2762  bool generate_volatile = field_info.IsVolatile()
2763      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2764      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2765  if (generate_volatile) {
2766    // Arm encoding have some additional constraints for ldrexd/strexd:
2767    // - registers need to be consecutive
2768    // - the first register should be even but not R14.
2769    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2770    // enable Arm encoding.
2771    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2772    locations->AddTemp(Location::RequiresRegister());
2773    locations->AddTemp(Location::RequiresRegister());
2774  }
2775}
2776
// Emits code for an instance/static field load: picks the load width and
// sign/zero extension from the field type, uses an exclusive-load sequence
// for wide volatiles without atomic ldrd, and emits the trailing load-any
// barrier for volatile reads.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Booleans are zero-extended.
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      // Bytes are sign-extended.
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are zero-extended.
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic ldrd: use ldrexd.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into core temps, then move to the FP register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Order subsequent accesses after the volatile load.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2861
// Instance field stores share their location setup with static stores.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2865
// Instance field stores share their codegen with static stores.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2869
// Instance field loads share their location setup with static loads.
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2873
// Instance field loads share their codegen with static loads.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2877
// Static field loads share their location setup with instance loads.
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2881
// Static field loads share their codegen with instance loads.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2885
// Static field stores share their location setup with instance stores.
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2889
// Static field stores share their codegen with instance stores.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2893
2894void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2895  LocationSummary* locations =
2896      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2897  locations->SetInAt(0, Location::RequiresRegister());
2898  if (instruction->HasUses()) {
2899    locations->SetOut(Location::SameAsFirstInput());
2900  }
2901}
2902
// Implicit null check: rely on the memory fault a null dereference produces.
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    // A following instruction will dereference the object and fault instead;
    // no code needed here.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Dummy load from the object into IP: faults if the reference is null. The
  // PC is recorded so the fault handler can map it back to this instruction.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2912
// Explicit null check: compare against null and branch to a slow path that
// throws NullPointerException.
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2923
2924void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2925  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2926    GenerateImplicitNullCheck(instruction);
2927  } else {
2928    GenerateExplicitNullCheck(instruction);
2929  }
2930}
2931
2932void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2933  LocationSummary* locations =
2934      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2935  locations->SetInAt(0, Location::RequiresRegister());
2936  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2937  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2938}
2939
// Emits an array element load. Every case follows the same pattern: with a
// constant index, fold (index << element-size-shift) + data offset into the
// load's immediate; with a register index, compute the scaled address into IP
// and load from there.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        // Byte-sized elements: no index scaling needed.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are zero-extended halfwords.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded as plain words (compressed-pointer-free layout).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The load above dereferences the array; record it as the implicit null check.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3066
3067void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3068  Primitive::Type value_type = instruction->GetComponentType();
3069
3070  bool needs_write_barrier =
3071      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3072  bool needs_runtime_call = instruction->NeedsTypeCheck();
3073
3074  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3075      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3076  if (needs_runtime_call) {
3077    InvokeRuntimeCallingConvention calling_convention;
3078    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3079    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3080    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3081  } else {
3082    locations->SetInAt(0, Location::RequiresRegister());
3083    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3084    locations->SetInAt(2, Location::RequiresRegister());
3085
3086    if (needs_write_barrier) {
3087      // Temporary registers for the write barrier.
3088      locations->AddTemp(Location::RequiresRegister());
3089      locations->AddTemp(Location::RequiresRegister());
3090    }
3091  }
3092}
3093
// Generates the store of a value into an array element. Primitive-typed
// stores are emitted inline; reference stores that require a type check
// (locations->WillCall()) are delegated to the pAputObject runtime entry
// point. Inline reference stores also dirty the array's GC card.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  // Decided by the locations builder: true when the store must go through
  // the runtime (reference store needing a type check).
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        // Constant index: fold the whole element offset into the store.
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Variable index: compute the element address into IP first.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        // Scale the index by the element size (2 bytes) while adding.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // The store above is the instruction that can fault on a null array.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Inline reference store: dirty the card covering `obj`.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Reference store requiring a type check: done entirely in the runtime
        // (which also performs the null check and write barrier).
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // kStoreWordPair stores the low register and the next one up (both
        // halves of the long) — see ExpectedPairLayout.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  // All other types record the implicit null check here, after the store.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3217
3218void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3219  LocationSummary* locations =
3220      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3221  locations->SetInAt(0, Location::RequiresRegister());
3222  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3223}
3224
3225void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3226  LocationSummary* locations = instruction->GetLocations();
3227  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3228  Register obj = locations->InAt(0).AsRegister<Register>();
3229  Register out = locations->Out().AsRegister<Register>();
3230  __ LoadFromOffset(kLoadWord, out, obj, offset);
3231  codegen_->MaybeRecordImplicitNullCheck(instruction);
3232}
3233
3234void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3235  LocationSummary* locations =
3236      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3237  locations->SetInAt(0, Location::RequiresRegister());
3238  locations->SetInAt(1, Location::RequiresRegister());
3239  if (instruction->HasUses()) {
3240    locations->SetOut(Location::SameAsFirstInput());
3241  }
3242}
3243
3244void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3245  LocationSummary* locations = instruction->GetLocations();
3246  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3247      instruction, locations->InAt(0), locations->InAt(1));
3248  codegen_->AddSlowPath(slow_path);
3249
3250  Register index = locations->InAt(0).AsRegister<Register>();
3251  Register length = locations->InAt(1).AsRegister<Register>();
3252
3253  __ cmp(index, ShifterOperand(length));
3254  __ b(slow_path->GetEntryLabel(), CS);
3255}
3256
// Write barrier: dirties the card table entry covering `object` after a
// reference store, so the GC can find the modified object. Skipped entirely
// when the stored `value` is null. `temp` and `card` are scratch registers
// clobbered by this code.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // Load the card table base from the current thread (TR).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // Card index = object address >> kCardShift.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store the low byte of the card table base into the card.
  // NOTE(review): this relies on that byte being the dirty-card marker by
  // construction of the card table — confirm against gc/accounting/card_table.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3265
// A temporary has no locations of its own; the register allocator handles it.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3269
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3274
// Parallel moves carry no locations of their own; reaching this builder
// indicates a bug, hence the fatal log.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3279
// Code emission for a parallel move is delegated to the move resolver.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3283
// A suspend check needs no registers; it may branch to a slow path that
// calls the runtime.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3287
3288void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3289  HBasicBlock* block = instruction->GetBlock();
3290  if (block->GetLoopInformation() != nullptr) {
3291    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3292    // The back edge will generate the suspend check.
3293    return;
3294  }
3295  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3296    // The goto will generate the suspend check.
3297    return;
3298  }
3299  GenerateSuspendCheck(instruction, nullptr);
3300}
3301
// Emits a thread suspension check. With a null `successor` the code falls
// through after (possibly) running the slow path; otherwise it branches to
// `successor` when no suspension is requested, or enters the slow path.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Load the thread-flags halfword from the current thread (TR); a non-zero
  // value means the runtime has requested something of this thread.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Fall-through form: slow path returns right here.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Branching form: the slow path jumps to `successor` itself.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3320
// The move resolver emits through the code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3324
// Emits one move of a parallel move: register/stack/constant source to
// register/stack destination. IP is used as the scratch register for
// memory-to-memory and constant-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      // Stack-to-stack move goes through IP.
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack-to-stack move: copy the two words through IP.
    DCHECK(destination.IsDoubleStackSlot()) << destination;
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
  } else {
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadImmediate(destination.AsRegister<Register>(), Low32Bits(value));
        } else {
          __ LoadImmediate(destination.AsRegister<Register>(), High32Bits(value));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      uint64_t int_value = bit_cast<uint64_t, double>(value);
      if (destination.IsFpuRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(Low32Bits(int_value)));
        } else {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(High32Bits(int_value)));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3426
// Swaps a core register with a stack slot, using IP to hold the register's
// original value while the slot is loaded.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3432
3433void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
3434  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
3435  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
3436  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
3437                    SP, mem1 + stack_offset);
3438  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
3439  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
3440                   SP, mem2 + stack_offset);
3441  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
3442}
3443
3444void ParallelMoveResolverARM::EmitSwap(size_t index) {
3445  MoveOperands* move = moves_.Get(index);
3446  Location source = move->GetSource();
3447  Location destination = move->GetDestination();
3448
3449  if (source.IsRegister() && destination.IsRegister()) {
3450    DCHECK_NE(source.AsRegister<Register>(), IP);
3451    DCHECK_NE(destination.AsRegister<Register>(), IP);
3452    __ Mov(IP, source.AsRegister<Register>());
3453    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3454    __ Mov(destination.AsRegister<Register>(), IP);
3455  } else if (source.IsRegister() && destination.IsStackSlot()) {
3456    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3457  } else if (source.IsStackSlot() && destination.IsRegister()) {
3458    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3459  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3460    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3461  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3462    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3463    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3464    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3465  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3466    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3467                                           : destination.AsFpuRegister<SRegister>();
3468    int mem = source.IsFpuRegister()
3469        ? destination.GetStackIndex()
3470        : source.GetStackIndex();
3471
3472    __ vmovrs(IP, reg);
3473    __ LoadFromOffset(kLoadWord, IP, SP, mem);
3474    __ StoreToOffset(kStoreWord, IP, SP, mem);
3475  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3476    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3477    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3478  } else {
3479    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
3480  }
3481}
3482
// Spills a core scratch register on the stack for the move resolver.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3486
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3490
3491void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3492  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3493      ? LocationSummary::kCallOnSlowPath
3494      : LocationSummary::kNoCall;
3495  LocationSummary* locations =
3496      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3497  locations->SetOut(Location::RequiresRegister());
3498}
3499
// Loads a java.lang.Class reference into the output register, either from
// the current method's declaring class (fast, referrer case) or from the dex
// cache with a slow path for unresolved/uninitialized classes.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // Fast path: the requested class is the declaring class of the method.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // Walk current method -> resolved-types cache -> cache entry.
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    // A null cache entry means the class is unresolved: enter the slow path,
    // which also runs <clinit> checking when requested.
    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3526
3527void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3528  LocationSummary* locations =
3529      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3530  locations->SetInAt(0, Location::RequiresRegister());
3531  if (check->HasUses()) {
3532    locations->SetOut(Location::SameAsFirstInput());
3533  }
3534}
3535
3536void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3537  // We assume the class is not null.
3538  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3539      check->GetLoadClass(), check, check->GetDexPc(), true);
3540  codegen_->AddSlowPath(slow_path);
3541  GenerateClassInitializationCheck(slow_path,
3542                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3543}
3544
// Emits the fast-path check that `class_reg` holds an initialized class;
// branches to `slow_path` otherwise, and binds the slow path's exit label.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  // Compare the class status field against kStatusInitialized; lower values
  // mean the class is not (yet) initialized.
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3555
3556void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3557  LocationSummary* locations =
3558      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3559  locations->SetOut(Location::RequiresRegister());
3560}
3561
// Loads a java.lang.String reference from the dex cache of the current
// method's declaring class, taking a slow path when the cache entry is null.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // Walk current method -> declaring class -> string cache -> cache entry.
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // Null means the string is not resolved yet: go resolve it in the runtime.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3575
3576void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3577  LocationSummary* locations =
3578      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3579  locations->SetOut(Location::RequiresRegister());
3580}
3581
3582void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3583  Register out = load->GetLocations()->Out().AsRegister<Register>();
3584  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3585  __ LoadFromOffset(kLoadWord, out, TR, offset);
3586  __ LoadImmediate(IP, 0);
3587  __ StoreToOffset(kStoreWord, IP, TR, offset);
3588}
3589
3590void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3591  LocationSummary* locations =
3592      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3593  InvokeRuntimeCallingConvention calling_convention;
3594  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3595}
3596
3597void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3598  codegen_->InvokeRuntime(
3599      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3600}
3601
3602void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3603  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3604      ? LocationSummary::kNoCall
3605      : LocationSummary::kCallOnSlowPath;
3606  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3607  locations->SetInAt(0, Location::RequiresRegister());
3608  locations->SetInAt(1, Location::RequiresRegister());
3609  locations->SetOut(Location::RequiresRegister());
3610}
3611
// Materializes the boolean result of `obj instanceof cls` into the output
// register: 0 for null or a failed check, 1 otherwise. Final classes are
// decided with a single class compare; other classes fall back to a slow
// path when the classes differ.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Failure (or null) path: produce 0.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3650
3651void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3652  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3653      instruction, LocationSummary::kCallOnSlowPath);
3654  locations->SetInAt(0, Location::RequiresRegister());
3655  locations->SetInAt(1, Location::RequiresRegister());
3656  locations->AddTemp(Location::RequiresRegister());
3657}
3658
// Emits a checkcast: nothing happens for a null object or when the object's
// class equals `cls`; otherwise a slow path performs the full check and
// throws on failure.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null object always passes the cast: jump straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  // Unequal classes go to the slow path for the full type check.
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3679
3680void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3681  LocationSummary* locations =
3682      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3683  InvokeRuntimeCallingConvention calling_convention;
3684  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3685}
3686
3687void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3688  codegen_->InvokeRuntime(instruction->IsEnter()
3689        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3690      instruction,
3691      instruction->GetDexPc());
3692}
3693
3694void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3695void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3696void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3697
3698void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3699  LocationSummary* locations =
3700      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3701  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3702         || instruction->GetResultType() == Primitive::kPrimLong);
3703  locations->SetInAt(0, Location::RequiresRegister());
3704  locations->SetInAt(1, Location::RequiresRegister());
3705  Location::OutputOverlap output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong)
3706      ? Location::kOutputOverlap
3707      : Location::kNoOutputOverlap;
3708  locations->SetOut(Location::RequiresRegister(), output_overlaps);
3709}
3710
3711void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3712  HandleBitwiseOperation(instruction);
3713}
3714
3715void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3716  HandleBitwiseOperation(instruction);
3717}
3718
3719void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3720  HandleBitwiseOperation(instruction);
3721}
3722
// Emits the code for And/Or/Xor. Int operands use a single ALU instruction;
// long operands apply the operation to the low and high register pair halves
// separately (which is why the builder marks the output as overlapping).
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    Register first = locations->InAt(0).AsRegister<Register>();
    Register second = locations->InAt(1).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    // Low halves first, then high halves, for each operation.
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}
3768
3769}  // namespace arm
3770}  // namespace art
3771