code_generator_arm.cc revision 2bcf9bf784a0021630d8fe63d7230d46d6891780
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// Stack offset of the current method pointer: it is stored in slot 0 of the
// frame (see GenerateFrameEntry, which stores R0 at SP+0).
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into the runtime (quick entrypoints).
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Core callee-saves. PC is listed to mimic Quick's frame layout; on entry LR
// is pushed in its slot instead, so popping this set performs the return
// (see GenerateFrameEntry / GenerateFrameExit).
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
// FPU callee-saves per the ARM calling convention: S16-S31.
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };
57
// Calling convention for calls into the runtime: arguments are passed in
// R0-R3 and S0-S3 (the kRuntimeParameter* arrays above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
69
70#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
71#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
72
// Slow path for HNullCheck: calls the runtime to throw a NullPointerException.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // No branch back to the fast path: the throw entrypoint unwinds.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  // The null check this slow path services.
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
88
// Slow path for HDivZeroCheck: calls the runtime to throw an ArithmeticException.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // No branch back to the fast path: the throw entrypoint unwinds.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  // The division-by-zero check this slow path services.
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};
104
// Slow path for HSuspendCheck: calls pTestSuspend so the thread can honor a
// pending suspend request, then resumes at the return label (or branches to
// `successor_` when one was supplied).
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Preserve live registers across the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ b(GetReturnLabel());
    } else {
      // Continue at the requested successor block.
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when no successor was provided.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
139
// Slow path for HBoundsCheck: moves the offending index and the array length
// into the runtime's argument registers and throws ArrayIndexOutOfBounds.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // No branch back to the fast path: the throw entrypoint unwinds.
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Where the out-of-range index lives at the check point.
  const Location index_location_;
  // Where the array length lives at the check point.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
171
// Slow path that resolves a class (and optionally runs its static
// initializer) through the runtime, then moves the result into the
// instruction's output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Preserve live registers across the runtime call.
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: type index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Pick the entrypoint: initialize static storage when a clinit is
    // required, otherwise just resolve the type.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
223
// Slow path that resolves a string through the runtime and moves the result
// (returned in R0) into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Preserve live registers across the runtime call.
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: string index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The string load this slow path services.
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};
252
// Slow path shared by HInstanceOf and HCheckCast: calls the runtime with the
// class to check against and the object's class. For instance-of the boolean
// result (in R0) is moved to the output; for check-cast the entrypoint throws
// on failure.
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Preserve live registers across the runtime call.
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // Either an HInstanceOf or an HCheckCast (see DCHECKs above).
  HInstruction* const instruction_;
  // Location of the class to check the object against.
  const Location class_to_check_;
  // Location of the object's class.
  const Location object_class_;
  // Dex PC used for stack-map recording at the runtime call.
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
302
303#undef __
304
305#undef __
306#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
307
308inline Condition ARMCondition(IfCondition cond) {
309  switch (cond) {
310    case kCondEQ: return EQ;
311    case kCondNE: return NE;
312    case kCondLT: return LT;
313    case kCondLE: return LE;
314    case kCondGT: return GT;
315    case kCondGE: return GE;
316    default:
317      LOG(FATAL) << "Unknown if condition";
318  }
319  return EQ;        // Unreachable.
320}
321
322inline Condition ARMOppositeCondition(IfCondition cond) {
323  switch (cond) {
324    case kCondEQ: return NE;
325    case kCondNE: return EQ;
326    case kCondLT: return GE;
327    case kCondLE: return GT;
328    case kCondGT: return LE;
329    case kCondGE: return LT;
330    default:
331      LOG(FATAL) << "Unknown if condition";
332  }
333  return EQ;        // Unreachable.
334}
335
336void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
337  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
338}
339
340void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
341  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
342}
343
344size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
345  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
346  return kArmWordSize;
347}
348
349size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
350  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
351  return kArmWordSize;
352}
353
354size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
355  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
356  return kArmWordSize;
357}
358
359size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
360  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
361  return kArmWordSize;
362}
363
// Builds the ARM code generator. Registers the callee-save masks with the
// base class and pre-allocates the registers every frame must preserve.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save one extra register for baseline. Note that on thumb2, there is no easy
  // instruction to restore just the PC, so this actually helps both baseline
  // and non-baseline to save and restore at least two registers at entry and exit.
  AddAllocatedRegister(Location::RegisterLocation(kCoreSavedRegisterForBaseline));
  // Save the PC register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
389
// Picks a free register (or register pair) suitable for `type` and marks it
// as blocked. Pair and single-register bookkeeping are kept consistent: when
// a pair is taken its halves are blocked, and when a single core register is
// taken every pair containing it is blocked.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block both halves so single-register allocation cannot reuse them.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need an aligned S-register pair (S2n, S2n+1).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
440
441void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
442  // Don't allocate the dalvik style register pair passing.
443  blocked_register_pairs_[R1_R2] = true;
444
445  // Stack register, LR and PC are always reserved.
446  blocked_core_registers_[SP] = true;
447  blocked_core_registers_[LR] = true;
448  blocked_core_registers_[PC] = true;
449
450  // Reserve thread register.
451  blocked_core_registers_[TR] = true;
452
453  // Reserve temp register.
454  blocked_core_registers_[IP] = true;
455
456  if (is_baseline) {
457    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
458      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
459    }
460
461    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
462
463    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
464      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
465    }
466  }
467
468  UpdateBlockedPairRegisters();
469}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Instruction visitor that emits ARM code; shares the assembler owned by
// `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
// Returns the 0-based index of the lowest set bit in `mask`.
// Callers must pass a non-zero mask (ffs() returns 0 for an empty mask,
// which would wrap around here).
static uint32_t LeastSignificantBit(uint32_t mask) {
  // ffs numbers bits starting at 1, so shift down to a 0-based index.
  int first_set = ffs(mask);
  return first_set - 1;
}
491
// Computes the core and FPU spill masks from the registers actually
// allocated, intersected with the callee-save sets. The FPU mask is then
// widened to a contiguous range (see comment below) so it can be saved with
// a single vpush/vpop.
void CodeGeneratorARM::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  // We use vpush and vpop for saving and restoring floating point registers, which take
  // a SRegister and the number of registers to save/restore after that SRegister. We
  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
  // but in the range.
  if (fpu_spill_mask_ != 0) {
    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
    // Fill in every bit between the lowest and highest spilled register.
    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
      fpu_spill_mask_ |= (1 << i);
    }
  }
}
508
// Emits the method prologue: optional implicit stack-overflow probe, pushes
// the callee-saves (LR substituted for PC), saves the FPU spill range, makes
// room for the rest of the frame and stores the current method (R0) at SP+0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (!skip_overflow_check) {
    // Implicit check: touch the word just below the reserved stack region; a
    // fault here is turned into a StackOverflowError by the runtime.
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is contiguous (see ComputeSpillMask), so one vpush
    // starting at its lowest register covers the whole range.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Reserve the remainder of the frame and store the current method at SP+0
  // (kCurrentMethodStackOffset).
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
531
// Emits the method epilogue: releases the frame, restores the FPU spill
// range, then pops the core callee-saves. Since PC is part of
// core_spill_mask_ (see kCoreCalleeSaves), the pop also performs the return.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    // Mirror of the vpush in GenerateFrameEntry over the contiguous range.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  __ PopList(core_spill_mask_);
}
540
541void CodeGeneratorARM::Bind(HBasicBlock* block) {
542  __ Bind(GetLabelOf(block));
543}
544
545Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
546  switch (load->GetType()) {
547    case Primitive::kPrimLong:
548    case Primitive::kPrimDouble:
549      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
550      break;
551
552    case Primitive::kPrimInt:
553    case Primitive::kPrimNot:
554    case Primitive::kPrimFloat:
555      return Location::StackSlot(GetStackSlot(load->GetLocal()));
556
557    case Primitive::kPrimBoolean:
558    case Primitive::kPrimByte:
559    case Primitive::kPrimChar:
560    case Primitive::kPrimShort:
561    case Primitive::kPrimVoid:
562      LOG(FATAL) << "Unexpected type " << load->GetType();
563  }
564
565  LOG(FATAL) << "Unreachable";
566  return Location();
567}
568
// Assigns the location (register, register pair, FPU register/pair, or stack
// slot) for the next method argument of `type`, advancing the visitor's
// core/FPU/stack cursors. Longs get an even/odd core pair (skipping R1 so
// R2_R3 is used), doubles get an aligned S-register pair.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      // The stack cursor advances even when the value goes in a register, so
      // register and stack assignments stay in sync.
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      // A long consumes two core slots and two stack slots.
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      // Re-check after the possible skip: the pair may no longer fit.
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // When no odd single register is free for back-filling, resume from the
      // double cursor so floats and doubles do not overlap.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at the next even register at or beyond the float cursor.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
643
// Returns where a method result of `type` lives per the calling convention:
// R0 (32-bit core), R0/R1 (long), S0 (float), S0/S1 (double), or an invalid
// location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      // Void has no result; callers get an invalid location.
      return Location();
  }
  UNREACHABLE();
  return Location();
}
673
// Emits a 32-bit move between any combination of core register, S register
// and stack slot. IP is used as scratch for the stack-to-stack case. No-op
// when source and destination are the same.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      // Remaining case: stack slot to core register.
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      // Remaining case: stack slot to S register.
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
707
// Emits a 64-bit move between register pairs, FPU register pairs and
// double stack slots. Pair-to-pair and stack-to-stack moves go through the
// parallel move resolver because halves could overlap. No-op when source
// and destination are the same.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // Halves may alias, so resolve them as a parallel move.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      DCHECK(ExpectedPairLayout(destination));
      // ldrd needs an even/odd destination pair (checked above).
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not an strd-compatible pair; store the halves separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: resolve word-by-word via the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
761
// Moves the value produced by `instruction` into `location` on behalf of
// `move_for`. Handles constants (materialized as immediates), locals (read
// from their stack slot), temporaries, and the common case where the value
// is still in the instruction's output location.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Already in the right place: nothing to emit.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Materialize in IP, then spill.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Materialize and spill each 32-bit half through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // Locals live in stack slots; pick the 32- or 64-bit move by type.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the instruction's output is still live in its location.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
845
// Calls the quick runtime entry point at `entry_point_offset` in the Thread
// object (reached via the thread register TR), and records the PC of the call
// for `instruction` at `dex_pc` so the stack map can describe this call site.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only explicit checks and instructions whose locations allow a call may
  // invoke the runtime from a method we consider a leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
859
// A goto consumes and produces no values, so it needs no location summary.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
863
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  // A goto that is a loop back edge carries the loop's suspend check: emit
  // the suspend check here (it handles reaching the successor) instead of a
  // plain branch.
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // In the entry block, a suspend check immediately preceding the goto is
  // emitted at this point.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor is laid out right after this block.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
885
// An exit consumes and produces no values, so it needs no location summary.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
889
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  // The exit block is not supposed to be reached by falling through; in
  // debug builds, emit a breakpoint so doing so traps immediately.
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
897
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // A register input is only needed when the condition was materialized (or
  // is not an HCondition at all); otherwise the comparison is emitted by the
  // HIf itself and the flags are consumed directly.
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
906
// Emits a conditional branch for an HIf. Three cases: a constant condition
// (branch or fall through statically), a materialized condition (compare its
// 0/1 value against 0), or a non-materialized HCondition (emit the compare
// here and branch on its condition code).
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      // Always true: branch to the true successor unless it is next in the
      // block layout.
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      // Always false: only the false-successor branch below is needed.
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Use the constant as an immediate operand when it is encodable;
        // otherwise load it into IP first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is the next block in the layout.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
958
959
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // An output register is only needed when the 0/1 result must be
  // materialized; otherwise the user consumes the condition flags directly.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
969
// Materializes a condition as 0/1 in its output register, when requested.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // Nothing to emit when the user consumes the flags directly.
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    // Use the constant as an immediate operand when it is encodable;
    // otherwise load it into IP first.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Turn the flags into 0/1 with an IT (if-then-else) block: move 1 on the
  // condition, 0 on its opposite.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
995
// The concrete comparison visitors below all delegate to the shared
// HCondition handling in VisitCondition.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1043
// Locals and loads of locals emit no code of their own; values are moved
// in and out of their stack slots by the code generator (see Move above).
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1060
// A store to a local requires its value input (input 1) to live in the
// local's stack slot, sized by the stored type; no code is emitted for the
// store itself.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1084
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the location summary already pins the stored value to the
  // local's stack slot.
  UNUSED(store);
}
1088
// Constants emit no code of their own: their output location is the constant
// itself, and the value is materialized at each use site.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1132
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the returned value to the calling-convention return location for its
  // type, so the frame exit below needs no extra moves.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  // The value is already in the return location; just tear down the frame.
  codegen_->GenerateFrameExit();
}
1152
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Try the intrinsic path first; when it matches, it sets up its own
  // locations and the generic invoke handling is skipped.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1162
// Loads the current method from its fixed slot at the bottom of the frame
// (kCurrentMethodStackOffset from SP) into `reg`.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1166
1167static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1168  if (invoke->GetLocations()->Intrinsified()) {
1169    IntrinsicCodeGeneratorARM intrinsic(codegen);
1170    intrinsic.Dispatch(invoke);
1171    return true;
1172  }
1173  return false;
1174}
1175
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Intrinsified invokes are fully emitted by the intrinsic generator.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  // Generic path: the first temporary holds the resolved target method.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
}
1185
1186void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1187  LocationSummary* locations =
1188      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1189  locations->AddTemp(Location::RegisterLocation(R0));
1190
1191  InvokeDexCallingConventionVisitor calling_convention_visitor;
1192  for (size_t i = 0; i < invoke->InputCount(); i++) {
1193    HInstruction* input = invoke->InputAt(i);
1194    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1195  }
1196
1197  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1198}
1199
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Try the intrinsic path first; when it matches, it sets up its own
  // locations and the generic invoke handling is skipped.
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1209
// Generates a virtual call: load the receiver's class, read the target
// method out of the class' embedded vtable, and call its quick entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the vtable slot for this method in the receiver's class.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check, when one is
  // required for this invoke.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1240
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: R12 receives the interface method's dex method
  // index (loaded by the code generator below).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1246
// Generates an interface call: load the receiver's class, read the target
// out of the class' embedded IMT (indexed by the IMT hash of the method),
// and call its quick entry point with the dex method index as a hidden
// argument.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the IMT slot: the imt index is hashed modulo kImtSize.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument: the second temporary receives the interface
  // method's dex method index.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check, when one is
  // required for this invoke.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1279
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The long negation sequence writes the output high word before it
      // reads the input high word (see the code generator), so the output
      // must not share registers with the input.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1305
// Emits an arithmetic negation for int, long, float or double operands.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in.
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1352
1353void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1354  Primitive::Type result_type = conversion->GetResultType();
1355  Primitive::Type input_type = conversion->GetInputType();
1356  DCHECK_NE(result_type, input_type);
1357
1358  // The float-to-long and double-to-long type conversions rely on a
1359  // call to the runtime.
1360  LocationSummary::CallKind call_kind =
1361      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1362       && result_type == Primitive::kPrimLong)
1363      ? LocationSummary::kCall
1364      : LocationSummary::kNoCall;
1365  LocationSummary* locations =
1366      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1367
1368  switch (result_type) {
1369    case Primitive::kPrimByte:
1370      switch (input_type) {
1371        case Primitive::kPrimShort:
1372        case Primitive::kPrimInt:
1373        case Primitive::kPrimChar:
1374          // Processing a Dex `int-to-byte' instruction.
1375          locations->SetInAt(0, Location::RequiresRegister());
1376          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1377          break;
1378
1379        default:
1380          LOG(FATAL) << "Unexpected type conversion from " << input_type
1381                     << " to " << result_type;
1382      }
1383      break;
1384
1385    case Primitive::kPrimShort:
1386      switch (input_type) {
1387        case Primitive::kPrimByte:
1388        case Primitive::kPrimInt:
1389        case Primitive::kPrimChar:
1390          // Processing a Dex `int-to-short' instruction.
1391          locations->SetInAt(0, Location::RequiresRegister());
1392          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1393          break;
1394
1395        default:
1396          LOG(FATAL) << "Unexpected type conversion from " << input_type
1397                     << " to " << result_type;
1398      }
1399      break;
1400
1401    case Primitive::kPrimInt:
1402      switch (input_type) {
1403        case Primitive::kPrimLong:
1404          // Processing a Dex `long-to-int' instruction.
1405          locations->SetInAt(0, Location::Any());
1406          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1407          break;
1408
1409        case Primitive::kPrimFloat:
1410          // Processing a Dex `float-to-int' instruction.
1411          locations->SetInAt(0, Location::RequiresFpuRegister());
1412          locations->SetOut(Location::RequiresRegister());
1413          locations->AddTemp(Location::RequiresFpuRegister());
1414          break;
1415
1416        case Primitive::kPrimDouble:
1417          // Processing a Dex `double-to-int' instruction.
1418          locations->SetInAt(0, Location::RequiresFpuRegister());
1419          locations->SetOut(Location::RequiresRegister());
1420          locations->AddTemp(Location::RequiresFpuRegister());
1421          break;
1422
1423        default:
1424          LOG(FATAL) << "Unexpected type conversion from " << input_type
1425                     << " to " << result_type;
1426      }
1427      break;
1428
1429    case Primitive::kPrimLong:
1430      switch (input_type) {
1431        case Primitive::kPrimByte:
1432        case Primitive::kPrimShort:
1433        case Primitive::kPrimInt:
1434        case Primitive::kPrimChar:
1435          // Processing a Dex `int-to-long' instruction.
1436          locations->SetInAt(0, Location::RequiresRegister());
1437          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1438          break;
1439
1440        case Primitive::kPrimFloat: {
1441          // Processing a Dex `float-to-long' instruction.
1442          InvokeRuntimeCallingConvention calling_convention;
1443          locations->SetInAt(0, Location::FpuRegisterLocation(
1444              calling_convention.GetFpuRegisterAt(0)));
1445          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1446          break;
1447        }
1448
1449        case Primitive::kPrimDouble: {
1450          // Processing a Dex `double-to-long' instruction.
1451          InvokeRuntimeCallingConvention calling_convention;
1452          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1453              calling_convention.GetFpuRegisterAt(0),
1454              calling_convention.GetFpuRegisterAt(1)));
1455          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1456          break;
1457        }
1458
1459        default:
1460          LOG(FATAL) << "Unexpected type conversion from " << input_type
1461                     << " to " << result_type;
1462      }
1463      break;
1464
1465    case Primitive::kPrimChar:
1466      switch (input_type) {
1467        case Primitive::kPrimByte:
1468        case Primitive::kPrimShort:
1469        case Primitive::kPrimInt:
1470          // Processing a Dex `int-to-char' instruction.
1471          locations->SetInAt(0, Location::RequiresRegister());
1472          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1473          break;
1474
1475        default:
1476          LOG(FATAL) << "Unexpected type conversion from " << input_type
1477                     << " to " << result_type;
1478      }
1479      break;
1480
1481    case Primitive::kPrimFloat:
1482      switch (input_type) {
1483        case Primitive::kPrimByte:
1484        case Primitive::kPrimShort:
1485        case Primitive::kPrimInt:
1486        case Primitive::kPrimChar:
1487          // Processing a Dex `int-to-float' instruction.
1488          locations->SetInAt(0, Location::RequiresRegister());
1489          locations->SetOut(Location::RequiresFpuRegister());
1490          break;
1491
1492        case Primitive::kPrimLong:
1493          // Processing a Dex `long-to-float' instruction.
1494          locations->SetInAt(0, Location::RequiresRegister());
1495          locations->SetOut(Location::RequiresFpuRegister());
1496          locations->AddTemp(Location::RequiresRegister());
1497          locations->AddTemp(Location::RequiresRegister());
1498          locations->AddTemp(Location::RequiresFpuRegister());
1499          locations->AddTemp(Location::RequiresFpuRegister());
1500          break;
1501
1502        case Primitive::kPrimDouble:
1503          // Processing a Dex `double-to-float' instruction.
1504          locations->SetInAt(0, Location::RequiresFpuRegister());
1505          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1506          break;
1507
1508        default:
1509          LOG(FATAL) << "Unexpected type conversion from " << input_type
1510                     << " to " << result_type;
1511      };
1512      break;
1513
1514    case Primitive::kPrimDouble:
1515      switch (input_type) {
1516        case Primitive::kPrimByte:
1517        case Primitive::kPrimShort:
1518        case Primitive::kPrimInt:
1519        case Primitive::kPrimChar:
1520          // Processing a Dex `int-to-double' instruction.
1521          locations->SetInAt(0, Location::RequiresRegister());
1522          locations->SetOut(Location::RequiresFpuRegister());
1523          break;
1524
1525        case Primitive::kPrimLong:
1526          // Processing a Dex `long-to-double' instruction.
1527          locations->SetInAt(0, Location::RequiresRegister());
1528          locations->SetOut(Location::RequiresFpuRegister());
1529          locations->AddTemp(Location::RequiresRegister());
1530          locations->AddTemp(Location::RequiresRegister());
1531          locations->AddTemp(Location::RequiresFpuRegister());
1532          break;
1533
1534        case Primitive::kPrimFloat:
1535          // Processing a Dex `float-to-double' instruction.
1536          locations->SetInAt(0, Location::RequiresFpuRegister());
1537          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1538          break;
1539
1540        default:
1541          LOG(FATAL) << "Unexpected type conversion from " << input_type
1542                     << " to " << result_type;
1543      };
1544      break;
1545
1546    default:
1547      LOG(FATAL) << "Unexpected type conversion from " << input_type
1548                 << " to " << result_type;
1549  }
1550}
1551
1552void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1553  LocationSummary* locations = conversion->GetLocations();
1554  Location out = locations->Out();
1555  Location in = locations->InAt(0);
1556  Primitive::Type result_type = conversion->GetResultType();
1557  Primitive::Type input_type = conversion->GetInputType();
1558  DCHECK_NE(result_type, input_type);
1559  switch (result_type) {
1560    case Primitive::kPrimByte:
1561      switch (input_type) {
1562        case Primitive::kPrimShort:
1563        case Primitive::kPrimInt:
1564        case Primitive::kPrimChar:
1565          // Processing a Dex `int-to-byte' instruction.
1566          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1567          break;
1568
1569        default:
1570          LOG(FATAL) << "Unexpected type conversion from " << input_type
1571                     << " to " << result_type;
1572      }
1573      break;
1574
1575    case Primitive::kPrimShort:
1576      switch (input_type) {
1577        case Primitive::kPrimByte:
1578        case Primitive::kPrimInt:
1579        case Primitive::kPrimChar:
1580          // Processing a Dex `int-to-short' instruction.
1581          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1582          break;
1583
1584        default:
1585          LOG(FATAL) << "Unexpected type conversion from " << input_type
1586                     << " to " << result_type;
1587      }
1588      break;
1589
1590    case Primitive::kPrimInt:
1591      switch (input_type) {
1592        case Primitive::kPrimLong:
1593          // Processing a Dex `long-to-int' instruction.
1594          DCHECK(out.IsRegister());
1595          if (in.IsRegisterPair()) {
1596            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1597          } else if (in.IsDoubleStackSlot()) {
1598            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1599          } else {
1600            DCHECK(in.IsConstant());
1601            DCHECK(in.GetConstant()->IsLongConstant());
1602            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1603            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1604          }
1605          break;
1606
1607        case Primitive::kPrimFloat: {
1608          // Processing a Dex `float-to-int' instruction.
1609          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1610          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1611          __ vcvtis(temp, temp);
1612          __ vmovrs(out.AsRegister<Register>(), temp);
1613          break;
1614        }
1615
1616        case Primitive::kPrimDouble: {
1617          // Processing a Dex `double-to-int' instruction.
1618          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1619          DRegister temp_d = FromLowSToD(temp_s);
1620          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1621          __ vcvtid(temp_s, temp_d);
1622          __ vmovrs(out.AsRegister<Register>(), temp_s);
1623          break;
1624        }
1625
1626        default:
1627          LOG(FATAL) << "Unexpected type conversion from " << input_type
1628                     << " to " << result_type;
1629      }
1630      break;
1631
1632    case Primitive::kPrimLong:
1633      switch (input_type) {
1634        case Primitive::kPrimByte:
1635        case Primitive::kPrimShort:
1636        case Primitive::kPrimInt:
1637        case Primitive::kPrimChar:
1638          // Processing a Dex `int-to-long' instruction.
1639          DCHECK(out.IsRegisterPair());
1640          DCHECK(in.IsRegister());
1641          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1642          // Sign extension.
1643          __ Asr(out.AsRegisterPairHigh<Register>(),
1644                 out.AsRegisterPairLow<Register>(),
1645                 31);
1646          break;
1647
1648        case Primitive::kPrimFloat:
1649          // Processing a Dex `float-to-long' instruction.
1650          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1651                                  conversion,
1652                                  conversion->GetDexPc());
1653          break;
1654
1655        case Primitive::kPrimDouble:
1656          // Processing a Dex `double-to-long' instruction.
1657          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1658                                  conversion,
1659                                  conversion->GetDexPc());
1660          break;
1661
1662        default:
1663          LOG(FATAL) << "Unexpected type conversion from " << input_type
1664                     << " to " << result_type;
1665      }
1666      break;
1667
1668    case Primitive::kPrimChar:
1669      switch (input_type) {
1670        case Primitive::kPrimByte:
1671        case Primitive::kPrimShort:
1672        case Primitive::kPrimInt:
1673          // Processing a Dex `int-to-char' instruction.
1674          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1675          break;
1676
1677        default:
1678          LOG(FATAL) << "Unexpected type conversion from " << input_type
1679                     << " to " << result_type;
1680      }
1681      break;
1682
1683    case Primitive::kPrimFloat:
1684      switch (input_type) {
1685        case Primitive::kPrimByte:
1686        case Primitive::kPrimShort:
1687        case Primitive::kPrimInt:
1688        case Primitive::kPrimChar: {
1689          // Processing a Dex `int-to-float' instruction.
1690          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1691          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1692          break;
1693        }
1694
1695        case Primitive::kPrimLong: {
1696          // Processing a Dex `long-to-float' instruction.
1697          Register low = in.AsRegisterPairLow<Register>();
1698          Register high = in.AsRegisterPairHigh<Register>();
1699          SRegister output = out.AsFpuRegister<SRegister>();
1700          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1701          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1702          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1703          DRegister temp1_d = FromLowSToD(temp1_s);
1704          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1705          DRegister temp2_d = FromLowSToD(temp2_s);
1706
1707          // Operations use doubles for precision reasons (each 32-bit
1708          // half of a long fits in the 53-bit mantissa of a double,
1709          // but not in the 24-bit mantissa of a float).  This is
1710          // especially important for the low bits.  The result is
1711          // eventually converted to float.
1712
1713          // temp1_d = int-to-double(high)
1714          __ vmovsr(temp1_s, high);
1715          __ vcvtdi(temp1_d, temp1_s);
1716          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1717          // as an immediate value into `temp2_d` does not work, as
1718          // this instruction only transfers 8 significant bits of its
1719          // immediate operand.  Instead, use two 32-bit core
1720          // registers to load `k2Pow32EncodingForDouble` into
1721          // `temp2_d`.
1722          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1723          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1724          __ vmovdrr(temp2_d, constant_low, constant_high);
1725          // temp1_d = temp1_d * 2^32
1726          __ vmuld(temp1_d, temp1_d, temp2_d);
1727          // temp2_d = unsigned-to-double(low)
1728          __ vmovsr(temp2_s, low);
1729          __ vcvtdu(temp2_d, temp2_s);
1730          // temp1_d = temp1_d + temp2_d
1731          __ vaddd(temp1_d, temp1_d, temp2_d);
1732          // output = double-to-float(temp1_d);
1733          __ vcvtsd(output, temp1_d);
1734          break;
1735        }
1736
1737        case Primitive::kPrimDouble:
1738          // Processing a Dex `double-to-float' instruction.
1739          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1740                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1741          break;
1742
1743        default:
1744          LOG(FATAL) << "Unexpected type conversion from " << input_type
1745                     << " to " << result_type;
1746      };
1747      break;
1748
1749    case Primitive::kPrimDouble:
1750      switch (input_type) {
1751        case Primitive::kPrimByte:
1752        case Primitive::kPrimShort:
1753        case Primitive::kPrimInt:
1754        case Primitive::kPrimChar: {
1755          // Processing a Dex `int-to-double' instruction.
1756          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1757          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1758                    out.AsFpuRegisterPairLow<SRegister>());
1759          break;
1760        }
1761
1762        case Primitive::kPrimLong: {
1763          // Processing a Dex `long-to-double' instruction.
1764          Register low = in.AsRegisterPairLow<Register>();
1765          Register high = in.AsRegisterPairHigh<Register>();
1766          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1767          DRegister out_d = FromLowSToD(out_s);
1768          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1769          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1770          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1771          DRegister temp_d = FromLowSToD(temp_s);
1772
1773          // out_d = int-to-double(high)
1774          __ vmovsr(out_s, high);
1775          __ vcvtdi(out_d, out_s);
1776          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1777          // as an immediate value into `temp_d` does not work, as
1778          // this instruction only transfers 8 significant bits of its
1779          // immediate operand.  Instead, use two 32-bit core
1780          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1781          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1782          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1783          __ vmovdrr(temp_d, constant_low, constant_high);
1784          // out_d = out_d * 2^32
1785          __ vmuld(out_d, out_d, temp_d);
1786          // temp_d = unsigned-to-double(low)
1787          __ vmovsr(temp_s, low);
1788          __ vcvtdu(temp_d, temp_s);
1789          // out_d = out_d + temp_d
1790          __ vaddd(out_d, out_d, temp_d);
1791          break;
1792        }
1793
1794        case Primitive::kPrimFloat:
1795          // Processing a Dex `float-to-double' instruction.
1796          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1797                    in.AsFpuRegister<SRegister>());
1798          break;
1799
1800        default:
1801          LOG(FATAL) << "Unexpected type conversion from " << input_type
1802                     << " to " << result_type;
1803      };
1804      break;
1805
1806    default:
1807      LOG(FATAL) << "Unexpected type conversion from " << input_type
1808                 << " to " << result_type;
1809  }
1810}
1811
1812void LocationsBuilderARM::VisitAdd(HAdd* add) {
1813  LocationSummary* locations =
1814      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1815  switch (add->GetResultType()) {
1816    case Primitive::kPrimInt: {
1817      locations->SetInAt(0, Location::RequiresRegister());
1818      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1819      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1820      break;
1821    }
1822
1823    case Primitive::kPrimLong: {
1824      locations->SetInAt(0, Location::RequiresRegister());
1825      locations->SetInAt(1, Location::RequiresRegister());
1826      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1827      break;
1828    }
1829
1830    case Primitive::kPrimFloat:
1831    case Primitive::kPrimDouble: {
1832      locations->SetInAt(0, Location::RequiresFpuRegister());
1833      locations->SetInAt(1, Location::RequiresFpuRegister());
1834      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1835      break;
1836    }
1837
1838    default:
1839      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1840  }
1841}
1842
1843void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1844  LocationSummary* locations = add->GetLocations();
1845  Location out = locations->Out();
1846  Location first = locations->InAt(0);
1847  Location second = locations->InAt(1);
1848  switch (add->GetResultType()) {
1849    case Primitive::kPrimInt:
1850      if (second.IsRegister()) {
1851        __ add(out.AsRegister<Register>(),
1852               first.AsRegister<Register>(),
1853               ShifterOperand(second.AsRegister<Register>()));
1854      } else {
1855        __ AddConstant(out.AsRegister<Register>(),
1856                       first.AsRegister<Register>(),
1857                       second.GetConstant()->AsIntConstant()->GetValue());
1858      }
1859      break;
1860
1861    case Primitive::kPrimLong: {
1862      DCHECK(second.IsRegisterPair());
1863      __ adds(out.AsRegisterPairLow<Register>(),
1864              first.AsRegisterPairLow<Register>(),
1865              ShifterOperand(second.AsRegisterPairLow<Register>()));
1866      __ adc(out.AsRegisterPairHigh<Register>(),
1867             first.AsRegisterPairHigh<Register>(),
1868             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1869      break;
1870    }
1871
1872    case Primitive::kPrimFloat:
1873      __ vadds(out.AsFpuRegister<SRegister>(),
1874               first.AsFpuRegister<SRegister>(),
1875               second.AsFpuRegister<SRegister>());
1876      break;
1877
1878    case Primitive::kPrimDouble:
1879      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1880               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1881               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1882      break;
1883
1884    default:
1885      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1886  }
1887}
1888
1889void LocationsBuilderARM::VisitSub(HSub* sub) {
1890  LocationSummary* locations =
1891      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1892  switch (sub->GetResultType()) {
1893    case Primitive::kPrimInt: {
1894      locations->SetInAt(0, Location::RequiresRegister());
1895      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1896      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1897      break;
1898    }
1899
1900    case Primitive::kPrimLong: {
1901      locations->SetInAt(0, Location::RequiresRegister());
1902      locations->SetInAt(1, Location::RequiresRegister());
1903      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1904      break;
1905    }
1906    case Primitive::kPrimFloat:
1907    case Primitive::kPrimDouble: {
1908      locations->SetInAt(0, Location::RequiresFpuRegister());
1909      locations->SetInAt(1, Location::RequiresFpuRegister());
1910      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1911      break;
1912    }
1913    default:
1914      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1915  }
1916}
1917
1918void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1919  LocationSummary* locations = sub->GetLocations();
1920  Location out = locations->Out();
1921  Location first = locations->InAt(0);
1922  Location second = locations->InAt(1);
1923  switch (sub->GetResultType()) {
1924    case Primitive::kPrimInt: {
1925      if (second.IsRegister()) {
1926        __ sub(out.AsRegister<Register>(),
1927               first.AsRegister<Register>(),
1928               ShifterOperand(second.AsRegister<Register>()));
1929      } else {
1930        __ AddConstant(out.AsRegister<Register>(),
1931                       first.AsRegister<Register>(),
1932                       -second.GetConstant()->AsIntConstant()->GetValue());
1933      }
1934      break;
1935    }
1936
1937    case Primitive::kPrimLong: {
1938      DCHECK(second.IsRegisterPair());
1939      __ subs(out.AsRegisterPairLow<Register>(),
1940              first.AsRegisterPairLow<Register>(),
1941              ShifterOperand(second.AsRegisterPairLow<Register>()));
1942      __ sbc(out.AsRegisterPairHigh<Register>(),
1943             first.AsRegisterPairHigh<Register>(),
1944             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1945      break;
1946    }
1947
1948    case Primitive::kPrimFloat: {
1949      __ vsubs(out.AsFpuRegister<SRegister>(),
1950               first.AsFpuRegister<SRegister>(),
1951               second.AsFpuRegister<SRegister>());
1952      break;
1953    }
1954
1955    case Primitive::kPrimDouble: {
1956      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1957               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1958               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1959      break;
1960    }
1961
1962
1963    default:
1964      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1965  }
1966}
1967
1968void LocationsBuilderARM::VisitMul(HMul* mul) {
1969  LocationSummary* locations =
1970      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1971  switch (mul->GetResultType()) {
1972    case Primitive::kPrimInt:
1973    case Primitive::kPrimLong:  {
1974      locations->SetInAt(0, Location::RequiresRegister());
1975      locations->SetInAt(1, Location::RequiresRegister());
1976      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1977      break;
1978    }
1979
1980    case Primitive::kPrimFloat:
1981    case Primitive::kPrimDouble: {
1982      locations->SetInAt(0, Location::RequiresFpuRegister());
1983      locations->SetInAt(1, Location::RequiresFpuRegister());
1984      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1985      break;
1986    }
1987
1988    default:
1989      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1990  }
1991}
1992
// Emits the multiplication selected by LocationsBuilderARM::VisitMul.
// The 64-bit case builds the product from three 32-bit multiplies; the
// instruction order and the use of IP as scratch are deliberate — do not
// reorder.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      // Plain 32-bit multiply; the high half of the product is discarded.
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // umull also leaves the high half of the unsigned product in IP.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2054
2055void LocationsBuilderARM::VisitDiv(HDiv* div) {
2056  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2057      ? LocationSummary::kCall
2058      : LocationSummary::kNoCall;
2059  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2060
2061  switch (div->GetResultType()) {
2062    case Primitive::kPrimInt: {
2063      locations->SetInAt(0, Location::RequiresRegister());
2064      locations->SetInAt(1, Location::RequiresRegister());
2065      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2066      break;
2067    }
2068    case Primitive::kPrimLong: {
2069      InvokeRuntimeCallingConvention calling_convention;
2070      locations->SetInAt(0, Location::RegisterPairLocation(
2071          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2072      locations->SetInAt(1, Location::RegisterPairLocation(
2073          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2074      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2075      break;
2076    }
2077    case Primitive::kPrimFloat:
2078    case Primitive::kPrimDouble: {
2079      locations->SetInAt(0, Location::RequiresFpuRegister());
2080      locations->SetInAt(1, Location::RequiresFpuRegister());
2081      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2082      break;
2083    }
2084
2085    default:
2086      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2087  }
2088}
2089
2090void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2091  LocationSummary* locations = div->GetLocations();
2092  Location out = locations->Out();
2093  Location first = locations->InAt(0);
2094  Location second = locations->InAt(1);
2095
2096  switch (div->GetResultType()) {
2097    case Primitive::kPrimInt: {
2098      __ sdiv(out.AsRegister<Register>(),
2099              first.AsRegister<Register>(),
2100              second.AsRegister<Register>());
2101      break;
2102    }
2103
2104    case Primitive::kPrimLong: {
2105      InvokeRuntimeCallingConvention calling_convention;
2106      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2107      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2108      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2109      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2110      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2111      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2112
2113      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2114      break;
2115    }
2116
2117    case Primitive::kPrimFloat: {
2118      __ vdivs(out.AsFpuRegister<SRegister>(),
2119               first.AsFpuRegister<SRegister>(),
2120               second.AsFpuRegister<SRegister>());
2121      break;
2122    }
2123
2124    case Primitive::kPrimDouble: {
2125      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2126               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2127               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2128      break;
2129    }
2130
2131    default:
2132      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2133  }
2134}
2135
2136void LocationsBuilderARM::VisitRem(HRem* rem) {
2137  Primitive::Type type = rem->GetResultType();
2138  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2139      ? LocationSummary::kNoCall
2140      : LocationSummary::kCall;
2141  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2142
2143  switch (type) {
2144    case Primitive::kPrimInt: {
2145      locations->SetInAt(0, Location::RequiresRegister());
2146      locations->SetInAt(1, Location::RequiresRegister());
2147      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2148      locations->AddTemp(Location::RequiresRegister());
2149      break;
2150    }
2151    case Primitive::kPrimLong: {
2152      InvokeRuntimeCallingConvention calling_convention;
2153      locations->SetInAt(0, Location::RegisterPairLocation(
2154          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2155      locations->SetInAt(1, Location::RegisterPairLocation(
2156          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2157      // The runtime helper puts the output in R2,R3.
2158      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2159      break;
2160    }
2161    case Primitive::kPrimFloat: {
2162      InvokeRuntimeCallingConvention calling_convention;
2163      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2164      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2165      locations->SetOut(Location::FpuRegisterLocation(S0));
2166      break;
2167    }
2168
2169    case Primitive::kPrimDouble: {
2170      InvokeRuntimeCallingConvention calling_convention;
2171      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2172          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2173      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2174          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2175      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2176      break;
2177    }
2178
2179    default:
2180      LOG(FATAL) << "Unexpected rem type " << type;
2181  }
2182}
2183
2184void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2185  LocationSummary* locations = rem->GetLocations();
2186  Location out = locations->Out();
2187  Location first = locations->InAt(0);
2188  Location second = locations->InAt(1);
2189
2190  Primitive::Type type = rem->GetResultType();
2191  switch (type) {
2192    case Primitive::kPrimInt: {
2193      Register reg1 = first.AsRegister<Register>();
2194      Register reg2 = second.AsRegister<Register>();
2195      Register temp = locations->GetTemp(0).AsRegister<Register>();
2196
2197      // temp = reg1 / reg2  (integer division)
2198      // temp = temp * reg2
2199      // dest = reg1 - temp
2200      __ sdiv(temp, reg1, reg2);
2201      __ mul(temp, temp, reg2);
2202      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2203      break;
2204    }
2205
2206    case Primitive::kPrimLong: {
2207      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2208      break;
2209    }
2210
2211    case Primitive::kPrimFloat: {
2212      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2213      break;
2214    }
2215
2216    case Primitive::kPrimDouble: {
2217      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2218      break;
2219    }
2220
2221    default:
2222      LOG(FATAL) << "Unexpected rem type " << type;
2223  }
2224}
2225
// Sets up locations for the divide-by-zero check.  The divisor may stay a
// constant: the codegen visitor folds the comparison away in that case.
void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    // The check passes its input through unchanged to any users.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2234
2235void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2236  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2237  codegen_->AddSlowPath(slow_path);
2238
2239  LocationSummary* locations = instruction->GetLocations();
2240  Location value = locations->InAt(0);
2241
2242  switch (instruction->GetType()) {
2243    case Primitive::kPrimInt: {
2244      if (value.IsRegister()) {
2245        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2246        __ b(slow_path->GetEntryLabel(), EQ);
2247      } else {
2248        DCHECK(value.IsConstant()) << value;
2249        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2250          __ b(slow_path->GetEntryLabel());
2251        }
2252      }
2253      break;
2254    }
2255    case Primitive::kPrimLong: {
2256      if (value.IsRegisterPair()) {
2257        __ orrs(IP,
2258                value.AsRegisterPairLow<Register>(),
2259                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2260        __ b(slow_path->GetEntryLabel(), EQ);
2261      } else {
2262        DCHECK(value.IsConstant()) << value;
2263        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2264          __ b(slow_path->GetEntryLabel());
2265        }
2266      }
2267      break;
2268    default:
2269      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2270    }
2271  }
2272}
2273
2274void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2275  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2276
2277  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2278      ? LocationSummary::kCall
2279      : LocationSummary::kNoCall;
2280  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2281
2282  switch (op->GetResultType()) {
2283    case Primitive::kPrimInt: {
2284      locations->SetInAt(0, Location::RequiresRegister());
2285      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2286      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2287      break;
2288    }
2289    case Primitive::kPrimLong: {
2290      InvokeRuntimeCallingConvention calling_convention;
2291      locations->SetInAt(0, Location::RegisterPairLocation(
2292          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2293      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2294      // The runtime helper puts the output in R0,R1.
2295      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2296      break;
2297    }
2298    default:
2299      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2300  }
2301}
2302
2303void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2304  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2305
2306  LocationSummary* locations = op->GetLocations();
2307  Location out = locations->Out();
2308  Location first = locations->InAt(0);
2309  Location second = locations->InAt(1);
2310
2311  Primitive::Type type = op->GetResultType();
2312  switch (type) {
2313    case Primitive::kPrimInt: {
2314      Register out_reg = out.AsRegister<Register>();
2315      Register first_reg = first.AsRegister<Register>();
2316      // Arm doesn't mask the shift count so we need to do it ourselves.
2317      if (second.IsRegister()) {
2318        Register second_reg = second.AsRegister<Register>();
2319        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2320        if (op->IsShl()) {
2321          __ Lsl(out_reg, first_reg, second_reg);
2322        } else if (op->IsShr()) {
2323          __ Asr(out_reg, first_reg, second_reg);
2324        } else {
2325          __ Lsr(out_reg, first_reg, second_reg);
2326        }
2327      } else {
2328        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2329        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2330        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
2331          __ Mov(out_reg, first_reg);
2332        } else if (op->IsShl()) {
2333          __ Lsl(out_reg, first_reg, shift_value);
2334        } else if (op->IsShr()) {
2335          __ Asr(out_reg, first_reg, shift_value);
2336        } else {
2337          __ Lsr(out_reg, first_reg, shift_value);
2338        }
2339      }
2340      break;
2341    }
2342    case Primitive::kPrimLong: {
2343      // TODO: Inline the assembly instead of calling the runtime.
2344      InvokeRuntimeCallingConvention calling_convention;
2345      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2346      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2347      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
2348      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2349      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2350
2351      int32_t entry_point_offset;
2352      if (op->IsShl()) {
2353        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
2354      } else if (op->IsShr()) {
2355        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
2356      } else {
2357        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
2358      }
2359      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2360      __ blx(LR);
2361      break;
2362    }
2363    default:
2364      LOG(FATAL) << "Unexpected operation type " << type;
2365  }
2366}
2367
// Shl shares its location constraints with Shr and UShr.
void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2371
// Shl shares its code generation with Shr and UShr.
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}
2375
// Shr shares its location constraints with Shl and UShr.
void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2379
// Code generation for arithmetic shift-right is shared via HandleShift.
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}
2383
// Location constraints for logical (unsigned) shift-right are shared via HandleShift.
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2387
// Code generation for logical (unsigned) shift-right is shared via HandleShift.
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2391
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation calls into the runtime, hence the kCall summary.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Pin the first two runtime argument registers as temps; the code generator
  // loads the type index into argument 0 and the current method into argument 1.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  // The runtime entrypoint leaves the new object in R0.
  locations->SetOut(Location::RegisterLocation(R0));
}
2400
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Runtime arguments: type index in argument register 0, current method in
  // argument register 1 (must match the temps reserved by the locations builder).
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  // The concrete entrypoint (e.g. resolved vs. unresolved allocation) is chosen
  // by the instruction itself.
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2409
void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  // Array allocation calls into the runtime, hence the kCall summary.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  // Temps for the type index (argument 0) and the current method (argument 2).
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  // The runtime entrypoint leaves the new array in R0.
  locations->SetOut(Location::RegisterLocation(R0));
  // The instruction's input is passed directly in argument register 1.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
2419
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Runtime arguments: type index in argument register 0, current method in
  // argument register 2; the instruction's input was placed in argument
  // register 1 by the locations builder.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
                          instruction,
                          instruction->GetDexPc());
}
2428
2429void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2430  LocationSummary* locations =
2431      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2432  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2433  if (location.IsStackSlot()) {
2434    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2435  } else if (location.IsDoubleStackSlot()) {
2436    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2437  }
2438  locations->SetOut(location);
2439}
2440
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}
2445
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // No overlap needed: the generated mvn reads its source before writing.
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2452
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimInt:
      // Bitwise not: out = ~in.
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      // 64-bit not: invert each half of the register pair independently.
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2473
void LocationsBuilderARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      // The result is an integer (-1/0/1), so it needs a core register.
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2496
// Materializes the three-way comparison result (-1, 0 or 1) in a core register.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare the high words first (signed); only when they are equal does
      // the low-word unsigned compare decide, handled by the shared tail below.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Pre-load 0 (the "equal" result) before the compare so the flags set by
      // vcmps/vcmpd survive until the shared tail.
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN operands set the V flag; the gt/lt bias decides which result wins.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: flags from the last compare above select the result.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2545
2546void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2547  LocationSummary* locations =
2548      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2549  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2550    locations->SetInAt(i, Location::Any());
2551  }
2552  locations->SetOut(Location::Any());
2553}
2554
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis generate no code of their own; reaching this visitor is a bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2559
// Emits a DMB with the weakest option that still satisfies the requested
// barrier kind: ISHST for store-store, full ISH for everything else.
void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
  // TODO (ported from quick): revisit Arm barrier kinds
  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
  switch (kind) {
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kAnyAny: {
      flavour = DmbOptions::ISH;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      flavour = DmbOptions::ISHST;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ dmb(flavour);
}
2579
// Atomically loads a 64-bit value from `addr + offset` into out_lo/out_hi
// using ldrexd, which takes the full address in a single base register.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    // out_lo is dead until the ldrexd, so borrow it to materialize the offset.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2591
// Atomically stores the 64-bit value value_lo/value_hi to `addr + offset`
// with an ldrexd/strexd retry loop; temp1/temp2 are clobbered.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 to temp1 on success; retry if the exclusive store failed.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2614
// Shared location setup for instance and static field stores.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  // Wide volatile stores without single-copy-atomic ldrd/strd need the
  // ldrexd/strexd path and therefore extra temporaries.
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2651
// Shared code generation for instance and static field stores. Handles the
// per-type store instruction, volatile memory barriers, the wide-atomic
// fallback when ldrd/strd are not single-copy atomic, and the GC write barrier
// for reference stores.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Barrier before the store: prior accesses may not be reordered past it.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic 64-bit store available: use ldrexd/strexd.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the double into core registers, then use the ldrexd/strexd path.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object holding the freshly stored reference.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // Barrier after the store: it may not be reordered past later accesses.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2749
// Shared location setup for instance and static field loads.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // A volatile double without single-copy-atomic ldrd needs the ldrexd path
  // plus two core temps to receive the raw 64-bit value.
  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // A volatile long may be loaded via GenerateWideAtomicLoad, which reuses the
  // output low register as scratch, so the output must not overlap the input.
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2773
// Shared code generation for instance and static field loads. Handles the
// per-type load instruction, the wide-atomic fallback for volatile 64-bit
// fields, and the trailing load-acquire barrier for volatile loads.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic 64-bit load available: use ldrexd.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into core temps, then move into the FP register pair.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Load-acquire semantics: later accesses may not be reordered before the load.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2858
// Instance field stores share their location setup with static field stores.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2862
// Instance field stores share their code generation with static field stores.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2866
// Instance field loads share their location setup with static field loads.
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2870
// Instance field loads share their code generation with static field loads.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2874
// Static field loads share their location setup with instance field loads.
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2878
// Static field loads share their code generation with instance field loads.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2882
// Static field stores share their location setup with instance field stores.
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2886
// Static field stores share their code generation with instance field stores.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2890
void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // The null check forwards its input unchanged when the checked reference is used.
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2899
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a following memory access can serve as the faulting instruction,
  // no explicit probe is needed here.
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Probe the object: a load from offset 0 faults on a null reference.
  // Recording the PC lets the runtime attribute the fault to this check.
  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2909
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Compare the reference against null and branch to the slow path on equality.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2920
void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
  // Compiler options choose between a faulting probe and an explicit compare.
  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
    GenerateImplicitNullCheck(instruction);
  } else {
    GenerateExplicitNullCheck(instruction);
  }
}
2928
void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // A constant index is folded into the load's immediate offset.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2936
// Generates the array element load. Each case computes the address
// obj + data_offset + index * element_size: a constant index is folded into
// the load's immediate offset, otherwise the scaled index is added into IP.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // Scale the index by the element size via the LSL shift on the add.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded like 32-bit ints (compressed-pointer-free heap).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3063
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    // Type-checked stores go through the runtime: array, index and value are
    // passed in the first three runtime argument registers.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    // A constant index can be folded into the store's immediate offset.
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
3090
// Emits an array element store. Primitive types compile to a single
// (possibly scaled) store instruction; reference stores either mark the GC
// card after the store or, when a type check is required, call the
// pAputObject runtime entry point instead of emitting inline code.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  // Set by the locations builder when the store needs a type check.
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 1-byte elements: the index needs no scaling.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        // Constant index: fold the element offset into the store.
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Variable index: compute the element base address in IP.
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 2-byte elements: index scaled by 2 (LSL #1).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // The store above may double as an implicit null check; record it
        // here because the runtime-call path handles null itself.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Reference store: dirty the GC card covering `obj`.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // The runtime performs the type check, the store, and the barrier.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 8-byte elements: kStoreWordPair stores both registers of the pair;
      // naming the low register is sufficient.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // The D register overlapping the low S register of the pair is stored.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3214
3215void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3216  LocationSummary* locations =
3217      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3218  locations->SetInAt(0, Location::RequiresRegister());
3219  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3220}
3221
3222void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3223  LocationSummary* locations = instruction->GetLocations();
3224  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3225  Register obj = locations->InAt(0).AsRegister<Register>();
3226  Register out = locations->Out().AsRegister<Register>();
3227  __ LoadFromOffset(kLoadWord, out, obj, offset);
3228  codegen_->MaybeRecordImplicitNullCheck(instruction);
3229}
3230
3231void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3232  LocationSummary* locations =
3233      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3234  locations->SetInAt(0, Location::RequiresRegister());
3235  locations->SetInAt(1, Location::RequiresRegister());
3236  if (instruction->HasUses()) {
3237    locations->SetOut(Location::SameAsFirstInput());
3238  }
3239}
3240
3241void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3242  LocationSummary* locations = instruction->GetLocations();
3243  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3244      instruction, locations->InAt(0), locations->InAt(1));
3245  codegen_->AddSlowPath(slow_path);
3246
3247  Register index = locations->InAt(0).AsRegister<Register>();
3248  Register length = locations->InAt(1).AsRegister<Register>();
3249
3250  __ cmp(index, ShifterOperand(length));
3251  __ b(slow_path->GetEntryLabel(), CS);
3252}
3253
// Marks the GC card covering `object` after a reference store, unless the
// stored `value` is null. `temp` and `card` are caller-provided scratch
// registers.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  // Storing null needs no card mark.
  __ CompareAndBranchIfZero(value, &is_null);
  // Load the thread-local card table base.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store the low byte of `card` (the table base) at base + (object >> shift).
  // The runtime biases the base so its low byte is the dirty-card value
  // (see gc/accounting/card_table.h).
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3262
3263void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3264  temp->SetLocations(nullptr);
3265}
3266
3267void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3268  // Nothing to do, this is driven by the code generator.
3269  UNUSED(temp);
3270}
3271
3272void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3273  UNUSED(instruction);
3274  LOG(FATAL) << "Unreachable";
3275}
3276
// Delegates to the parallel move resolver, which emits the register/stack
// shuffling code for all moves of the instruction.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3280
// A suspend check has no operands and no output; it may only call into the
// runtime on its slow path.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3284
3285void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3286  HBasicBlock* block = instruction->GetBlock();
3287  if (block->GetLoopInformation() != nullptr) {
3288    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3289    // The back edge will generate the suspend check.
3290    return;
3291  }
3292  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3293    // The goto will generate the suspend check.
3294    return;
3295  }
3296  GenerateSuspendCheck(instruction, nullptr);
3297}
3298
// Emits a suspend check: loads the thread's flags halfword and enters the
// SuspendCheckSlowPathARM when any flag is set. With a non-null `successor`
// the fast path branches directly to the successor block; otherwise the
// slow path returns inline at the bound return label.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Flags set: go to the slow path, which resumes right here.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Flags clear: skip straight to the successor; otherwise slow path.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3317
3318ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3319  return codegen_->GetAssembler();
3320}
3321
// Emits a single move of the parallel move at `index`. Sources may be core
// or FPU registers, 32-/64-bit stack slots, or constants; IP serves as the
// scratch register for memory-to-memory transfers.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register source: register move or spill to a stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source: fill a core/FPU register, or copy slot to
    // slot through IP.
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit slot to 64-bit slot, copied one word at a time through IP.
    DCHECK(destination.IsDoubleStackSlot()) << destination;
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
    __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
  } else {
    // Constant source: materialize into a register or store to the stack.
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadImmediate(destination.AsRegister<Register>(), Low32Bits(value));
        } else {
          __ LoadImmediate(destination.AsRegister<Register>(), High32Bits(value));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      uint64_t int_value = bit_cast<uint64_t, double>(value);
      if (destination.IsFpuRegister()) {
        // In the presence of long or double constants, the parallel move resolver will
        // split the move into two, but keeps the same constant for both moves. Here,
        // we use the low or high part depending on which register this move goes to.
        if (destination.reg() % 2 == 0) {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(Low32Bits(int_value)));
        } else {
          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
                            bit_cast<float, uint32_t>(High32Bits(int_value)));
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3423
// Swaps the contents of core register `reg` with the stack slot at SP+mem,
// holding the register value in IP during the exchange.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3429
// Swaps two 32-bit stack slots. A second core scratch register is obtained
// next to IP; if the scope had to spill one (via SpillScratch's push), SP
// moved down one word, so both offsets are rebased by kArmWordSize.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3440
3441void ParallelMoveResolverARM::EmitSwap(size_t index) {
3442  MoveOperands* move = moves_.Get(index);
3443  Location source = move->GetSource();
3444  Location destination = move->GetDestination();
3445
3446  if (source.IsRegister() && destination.IsRegister()) {
3447    DCHECK_NE(source.AsRegister<Register>(), IP);
3448    DCHECK_NE(destination.AsRegister<Register>(), IP);
3449    __ Mov(IP, source.AsRegister<Register>());
3450    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3451    __ Mov(destination.AsRegister<Register>(), IP);
3452  } else if (source.IsRegister() && destination.IsStackSlot()) {
3453    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3454  } else if (source.IsStackSlot() && destination.IsRegister()) {
3455    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3456  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3457    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3458  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3459    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3460    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3461    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3462  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3463    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3464                                           : destination.AsFpuRegister<SRegister>();
3465    int mem = source.IsFpuRegister()
3466        ? destination.GetStackIndex()
3467        : source.GetStackIndex();
3468
3469    __ vmovrs(IP, reg);
3470    __ LoadFromOffset(kLoadWord, IP, SP, mem);
3471    __ StoreToOffset(kStoreWord, IP, SP, mem);
3472  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3473    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3474    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3475  } else {
3476    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
3477  }
3478}
3479
3480void ParallelMoveResolverARM::SpillScratch(int reg) {
3481  __ Push(static_cast<Register>(reg));
3482}
3483
3484void ParallelMoveResolverARM::RestoreScratch(int reg) {
3485  __ Pop(static_cast<Register>(reg));
3486}
3487
3488void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3489  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3490      ? LocationSummary::kCallOnSlowPath
3491      : LocationSummary::kNoCall;
3492  LocationSummary* locations =
3493      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3494  locations->SetOut(Location::RequiresRegister());
3495}
3496
// Materializes a class reference. The referrer's own class is read straight
// out of the current ArtMethod; other classes come from the dex cache, with
// LoadClassSlowPathARM handling unresolved (null) entries and, if required,
// the class initialization check.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // Fast path: out = current_method->declaring_class_.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // out = current_method->dex_cache_resolved_types_[type_index].
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the type is unresolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3523
3524void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3525  LocationSummary* locations =
3526      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3527  locations->SetInAt(0, Location::RequiresRegister());
3528  if (check->HasUses()) {
3529    locations->SetOut(Location::SameAsFirstInput());
3530  }
3531}
3532
3533void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3534  // We assume the class is not null.
3535  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3536      check->GetLoadClass(), check, check->GetDexPc(), true);
3537  codegen_->AddSlowPath(slow_path);
3538  GenerateClassInitializationCheck(slow_path,
3539                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3540}
3541
// Branches to `slow_path` unless the class in `class_reg` is initialized:
// any status value below kStatusInitialized (signed compare) takes the
// slow path.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3552
3553void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3554  LocationSummary* locations =
3555      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3556  locations->SetOut(Location::RequiresRegister());
3557}
3558
// Loads a string reference: current method -> declaring class -> dex cache
// strings array -> entry at the string index. A null entry means the string
// is unresolved and is handled by LoadStringSlowPathARM.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3572
3573void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3574  LocationSummary* locations =
3575      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3576  locations->SetOut(Location::RequiresRegister());
3577}
3578
// Moves the thread's pending exception into the output register, then
// clears the thread-local exception field by storing null after the load.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3586
3587void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3588  LocationSummary* locations =
3589      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3590  InvokeRuntimeCallingConvention calling_convention;
3591  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3592}
3593
3594void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3595  codegen_->InvokeRuntime(
3596      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3597}
3598
3599void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3600  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3601      ? LocationSummary::kNoCall
3602      : LocationSummary::kCallOnSlowPath;
3603  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3604  locations->SetInAt(0, Location::RequiresRegister());
3605  locations->SetInAt(1, Location::RequiresRegister());
3606  // The out register is used as a temporary, so it overlaps with the inputs.
3607  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3608}
3609
// Implements instanceof. Null objects produce 0. For final classes a direct
// class-pointer comparison decides the result; otherwise a class mismatch
// falls back to TypeCheckSlowPathARM for the full subtype test.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // False result. The slow path's exit label is bound after the zero store,
  // so a returning slow path skips it.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3648
3649void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3650  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3651      instruction, LocationSummary::kCallOnSlowPath);
3652  locations->SetInAt(0, Location::RequiresRegister());
3653  locations->SetInAt(1, Location::RequiresRegister());
3654  locations->AddTemp(Location::RequiresRegister());
3655}
3656
// Implements check-cast. A null object trivially passes (branch straight to
// the slow path's exit label). Otherwise the object's class is compared
// against `cls`; on mismatch TypeCheckSlowPathARM does the full check.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3677
3678void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3679  LocationSummary* locations =
3680      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3681  InvokeRuntimeCallingConvention calling_convention;
3682  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3683}
3684
3685void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3686  codegen_->InvokeRuntime(instruction->IsEnter()
3687        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3688      instruction,
3689      instruction->GetDexPc());
3690}
3691
// And/Or/Xor share a single location-building helper.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3695
3696void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3697  LocationSummary* locations =
3698      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3699  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3700         || instruction->GetResultType() == Primitive::kPrimLong);
3701  locations->SetInAt(0, Location::RequiresRegister());
3702  locations->SetInAt(1, Location::RequiresRegister());
3703  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3704}
3705
// The three bitwise visitors delegate to HandleBitwiseOperation, which
// picks the instruction from the HInstruction kind.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3717
3718void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3719  LocationSummary* locations = instruction->GetLocations();
3720
3721  if (instruction->GetResultType() == Primitive::kPrimInt) {
3722    Register first = locations->InAt(0).AsRegister<Register>();
3723    Register second = locations->InAt(1).AsRegister<Register>();
3724    Register out = locations->Out().AsRegister<Register>();
3725    if (instruction->IsAnd()) {
3726      __ and_(out, first, ShifterOperand(second));
3727    } else if (instruction->IsOr()) {
3728      __ orr(out, first, ShifterOperand(second));
3729    } else {
3730      DCHECK(instruction->IsXor());
3731      __ eor(out, first, ShifterOperand(second));
3732    }
3733  } else {
3734    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3735    Location first = locations->InAt(0);
3736    Location second = locations->InAt(1);
3737    Location out = locations->Out();
3738    if (instruction->IsAnd()) {
3739      __ and_(out.AsRegisterPairLow<Register>(),
3740              first.AsRegisterPairLow<Register>(),
3741              ShifterOperand(second.AsRegisterPairLow<Register>()));
3742      __ and_(out.AsRegisterPairHigh<Register>(),
3743              first.AsRegisterPairHigh<Register>(),
3744              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3745    } else if (instruction->IsOr()) {
3746      __ orr(out.AsRegisterPairLow<Register>(),
3747             first.AsRegisterPairLow<Register>(),
3748             ShifterOperand(second.AsRegisterPairLow<Register>()));
3749      __ orr(out.AsRegisterPairHigh<Register>(),
3750             first.AsRegisterPairHigh<Register>(),
3751             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3752    } else {
3753      DCHECK(instruction->IsXor());
3754      __ eor(out.AsRegisterPairLow<Register>(),
3755             first.AsRegisterPairLow<Register>(),
3756             ShifterOperand(second.AsRegisterPairLow<Register>()));
3757      __ eor(out.AsRegisterPairHigh<Register>(),
3758             first.AsRegisterPairHigh<Register>(),
3759             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3760    }
3761  }
3762}
3763
// Emits the call sequence for a static or direct invoke. Recursive calls
// branch straight to this method's own frame entry; otherwise the callee's
// ArtMethod is looked up in the caller's dex cache and invoked through its
// quick-compiled code entry point. `temp` must be the ArtMethod register.
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
  DCHECK_EQ(temp, kArtMethodRegister);

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  if (!invoke->IsRecursive()) {
    // temp = temp->dex_cache_resolved_methods_;
    __ LoadFromOffset(
        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
    // temp = temp[index_in_cache]
    __ LoadFromOffset(
        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
    // LR = temp[offset_of_quick_compiled_code]
    __ LoadFromOffset(kLoadWord, LR, temp,
                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmWordSize).Int32Value());
    // LR()
    __ blx(LR);
  } else {
    // Direct recursive call: branch to our own frame entry.
    __ bl(GetFrameEntryLabel());
  }

  RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!IsLeafMethod());
}
3796
3797}  // namespace arm
3798}  // namespace art
3799