code_generator_arm.cc revision ffe8a577a4c644a2c5387f1e8efe92fb0efac43f
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "intrinsics.h"
23#include "intrinsics_arm.h"
24#include "mirror/array-inl.h"
25#include "mirror/art_method.h"
26#include "mirror/class.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29#include "utils/arm/managed_register_arm.h"
30#include "utils/assembler.h"
31#include "utils/stack_checks.h"
32
33namespace art {
34
35namespace arm {
36
37static bool ExpectedPairLayout(Location location) {
38  // We expected this for both core and fpu register pairs.
39  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
40}
41
// The current method is stored at [SP, #0]; GenerateFrameEntry saves R0 there.
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into runtime entrypoints
// (see InvokeRuntimeCallingConvention below).
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);
// We unconditionally allocate R5 to ensure we can do long operations
// with baseline.
static constexpr Register kCoreSavedRegisterForBaseline = R5;
// Callee-save sets. PC stands in for LR in the core set: GenerateFrameEntry
// pushes LR in its place, so that GenerateFrameExit can pop straight into PC
// and return in one instruction.
static constexpr Register kCoreCalleeSaves[] =
    { R5, R6, R7, R8, R10, R11, PC };
static constexpr SRegister kFpuCalleeSaves[] =
    { S16, S17, S18, S19, S20, S21, S22, S23, S24, S25, S26, S27, S28, S29, S30, S31 };

// D31 cannot be split into two S registers, and the register allocator only works on
// S registers. Therefore there is no need to block it.
static constexpr DRegister DTMP = D31;
61
// Calling convention used for calls into runtime entrypoints: arguments are
// passed in kRuntimeParameterCoreRegisters / kRuntimeParameterFpuRegisters.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
73
// Shorthands used by the slow path classes below: `__` emits through the
// code generator's assembler, and QUICK_ENTRY_POINT yields the Thread-relative
// byte offset of a quick runtime entrypoint.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
76
77class NullCheckSlowPathARM : public SlowPathCodeARM {
78 public:
79  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
80
81  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
82    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
83    __ Bind(GetEntryLabel());
84    arm_codegen->InvokeRuntime(
85        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
86  }
87
88 private:
89  HNullCheck* const instruction_;
90  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
91};
92
93class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
94 public:
95  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
96
97  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
98    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
99    __ Bind(GetEntryLabel());
100    arm_codegen->InvokeRuntime(
101        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
102  }
103
104 private:
105  HDivZeroCheck* const instruction_;
106  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
107};
108
109class SuspendCheckSlowPathARM : public SlowPathCodeARM {
110 public:
111  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
112      : instruction_(instruction), successor_(successor) {}
113
114  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
115    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
116    __ Bind(GetEntryLabel());
117    codegen->SaveLiveRegisters(instruction_->GetLocations());
118    arm_codegen->InvokeRuntime(
119        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
120    codegen->RestoreLiveRegisters(instruction_->GetLocations());
121    if (successor_ == nullptr) {
122      __ b(GetReturnLabel());
123    } else {
124      __ b(arm_codegen->GetLabelOf(successor_));
125    }
126  }
127
128  Label* GetReturnLabel() {
129    DCHECK(successor_ == nullptr);
130    return &return_label_;
131  }
132
133 private:
134  HSuspendCheck* const instruction_;
135  // If not null, the block to branch to after the suspend check.
136  HBasicBlock* const successor_;
137
138  // If `successor_` is null, the label to branch to after the suspend check.
139  Label return_label_;
140
141  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
142};
143
144class BoundsCheckSlowPathARM : public SlowPathCodeARM {
145 public:
146  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
147                         Location index_location,
148                         Location length_location)
149      : instruction_(instruction),
150        index_location_(index_location),
151        length_location_(length_location) {}
152
153  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
154    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
155    __ Bind(GetEntryLabel());
156    // We're moving two locations to locations that could overlap, so we need a parallel
157    // move resolver.
158    InvokeRuntimeCallingConvention calling_convention;
159    codegen->EmitParallelMoves(
160        index_location_,
161        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
162        length_location_,
163        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
164    arm_codegen->InvokeRuntime(
165        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
166  }
167
168 private:
169  HBoundsCheck* const instruction_;
170  const Location index_location_;
171  const Location length_location_;
172
173  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
174};
175
// Slow path that resolves a class (and optionally runs its static
// initializer) through the runtime, then copies the result from R0 into the
// location the fast path expects.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call clobbers registers, so spill everything live around it.
    codegen->SaveLiveRegisters(locations);

    // Entrypoint arguments: the type index in the first parameter register,
    // the current method in the second.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not be among the saved live registers, or the restore
      // below would clobber the freshly produced value.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};
227
228class LoadStringSlowPathARM : public SlowPathCodeARM {
229 public:
230  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
231
232  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
233    LocationSummary* locations = instruction_->GetLocations();
234    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
235
236    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
237    __ Bind(GetEntryLabel());
238    codegen->SaveLiveRegisters(locations);
239
240    InvokeRuntimeCallingConvention calling_convention;
241    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
242    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
243    arm_codegen->InvokeRuntime(
244        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
245    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
246
247    codegen->RestoreLiveRegisters(locations);
248    __ b(GetExitLabel());
249  }
250
251 private:
252  HLoadString* const instruction_;
253
254  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
255};
256
// Slow path shared by HInstanceOf and HCheckCast: calls into the runtime for
// the non-trivial subtype test. For instance-of, the boolean result is copied
// from R0 to the output; check-cast produces no value (the entrypoint throws
// on failure).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output must not be among the saved live registers,
    // or the restore below would clobber the result.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  // The dex PC of `instruction_`.
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};
306
// Redefine `__` for the rest of the file: emit through this code generator's
// own assembler rather than a slow path's `codegen` parameter.
// (A duplicate `#undef __` was removed here; one is sufficient.)
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
311
312inline Condition ARMCondition(IfCondition cond) {
313  switch (cond) {
314    case kCondEQ: return EQ;
315    case kCondNE: return NE;
316    case kCondLT: return LT;
317    case kCondLE: return LE;
318    case kCondGT: return GT;
319    case kCondGE: return GE;
320    default:
321      LOG(FATAL) << "Unknown if condition";
322  }
323  return EQ;        // Unreachable.
324}
325
326inline Condition ARMOppositeCondition(IfCondition cond) {
327  switch (cond) {
328    case kCondEQ: return NE;
329    case kCondNE: return EQ;
330    case kCondLT: return GE;
331    case kCondLE: return GT;
332    case kCondGT: return LE;
333    case kCondGE: return LT;
334    default:
335      LOG(FATAL) << "Unknown if condition";
336  }
337  return EQ;        // Unreachable.
338}
339
340void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
341  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
342}
343
344void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
345  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
346}
347
// Spills core register `reg_id` to [SP, #stack_index] and returns the number
// of bytes the slot occupies.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
352
// Reloads core register `reg_id` from [SP, #stack_index] and returns the
// number of bytes the slot occupies.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
357
// Spills S register `reg_id` to [SP, #stack_index] and returns the number of
// bytes the slot occupies.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
362
// Reloads S register `reg_id` from [SP, #stack_index] and returns the number
// of bytes the slot occupies.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}
367
// Constructs the ARM code generator, registering the register counts and
// callee-save masks with the base CodeGenerator.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfSRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {
  // Save the PC register to mimic Quick: marking PC as allocated makes it part
  // of the spill mask computed in ComputeSpillMask.
  AddAllocatedRegister(Location::RegisterLocation(PC));
}
389
390Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
391  switch (type) {
392    case Primitive::kPrimLong: {
393      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
394      ArmManagedRegister pair =
395          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
396      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
397      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
398
399      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
400      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
401      UpdateBlockedPairRegisters();
402      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
403    }
404
405    case Primitive::kPrimByte:
406    case Primitive::kPrimBoolean:
407    case Primitive::kPrimChar:
408    case Primitive::kPrimShort:
409    case Primitive::kPrimInt:
410    case Primitive::kPrimNot: {
411      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
412      // Block all register pairs that contain `reg`.
413      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
414        ArmManagedRegister current =
415            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
416        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
417          blocked_register_pairs_[i] = true;
418        }
419      }
420      return Location::RegisterLocation(reg);
421    }
422
423    case Primitive::kPrimFloat: {
424      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
425      return Location::FpuRegisterLocation(reg);
426    }
427
428    case Primitive::kPrimDouble: {
429      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
430      DCHECK_EQ(reg % 2, 0);
431      return Location::FpuRegisterPairLocation(reg, reg + 1);
432    }
433
434    case Primitive::kPrimVoid:
435      LOG(FATAL) << "Unreachable type " << type;
436  }
437
438  return Location();
439}
440
441void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline) const {
442  // Don't allocate the dalvik style register pair passing.
443  blocked_register_pairs_[R1_R2] = true;
444
445  // Stack register, LR and PC are always reserved.
446  blocked_core_registers_[SP] = true;
447  blocked_core_registers_[LR] = true;
448  blocked_core_registers_[PC] = true;
449
450  // Reserve thread register.
451  blocked_core_registers_[TR] = true;
452
453  // Reserve temp register.
454  blocked_core_registers_[IP] = true;
455
456  if (is_baseline) {
457    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
458      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
459    }
460
461    blocked_core_registers_[kCoreSavedRegisterForBaseline] = false;
462
463    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
464      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
465    }
466  }
467
468  UpdateBlockedPairRegisters();
469}
470
471void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
472  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
473    ArmManagedRegister current =
474        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
475    if (blocked_core_registers_[current.AsRegisterPairLow()]
476        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
477      blocked_register_pairs_[i] = true;
478    }
479  }
480}
481
// Visitor that emits native code for each HInstruction, sharing the
// assembler owned by `codegen`.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
486
// Returns the zero-based index of the lowest set bit of `mask`.
// Matches ffs(mask) - 1, including the mask == 0 case, which yields
// 0xFFFFFFFF (callers only pass non-zero masks).
static uint32_t LeastSignificantBit(uint32_t mask) {
  if (mask == 0) {
    return static_cast<uint32_t>(-1);
  }
  uint32_t bit = 0;
  uint32_t remaining = mask;
  while ((remaining & 1) == 0) {
    remaining >>= 1;
    ++bit;
  }
  return bit;
}
491
492void CodeGeneratorARM::ComputeSpillMask() {
493  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
494  // Save one extra register for baseline. Note that on thumb2, there is no easy
495  // instruction to restore just the PC, so this actually helps both baseline
496  // and non-baseline to save and restore at least two registers at entry and exit.
497  core_spill_mask_ |= (1 << kCoreSavedRegisterForBaseline);
498  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
499  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
500  // We use vpush and vpop for saving and restoring floating point registers, which take
501  // a SRegister and the number of registers to save/restore after that SRegister. We
502  // therefore update the `fpu_spill_mask_` to also contain those registers not allocated,
503  // but in the range.
504  if (fpu_spill_mask_ != 0) {
505    uint32_t least_significant_bit = LeastSignificantBit(fpu_spill_mask_);
506    uint32_t most_significant_bit = MostSignificantBit(fpu_spill_mask_);
507    for (uint32_t i = least_significant_bit + 1 ; i < most_significant_bit; ++i) {
508      fpu_spill_mask_ |= (1 << i);
509    }
510  }
511}
512
// Emits the method prologue: optional stack overflow probe, callee-save
// pushes, frame allocation, and storing the current method at [SP, #0].
void CodeGeneratorARM::GenerateFrameEntry() {
  // A leaf method with a small enough frame cannot grow the stack past the
  // reserved guard region, so its probe can be skipped.
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
  __ Bind(&frame_entry_label_);

  if (HasEmptyFrame()) {
    return;
  }

  if (!skip_overflow_check) {
    // Implicit stack overflow check: load from `reserved bytes` below SP. If
    // the stack is exhausted this load faults; the recorded PC maps the fault
    // back to this method (no explicit compare-and-branch is emitted).
    __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
    __ LoadFromOffset(kLoadWord, IP, IP, 0);
    RecordPcInfo(nullptr, 0);
  }

  // PC is in the list of callee-save to mimic Quick, but we need to push
  // LR at entry instead.
  __ PushList((core_spill_mask_ & (~(1 << PC))) | 1 << LR);
  if (fpu_spill_mask_ != 0) {
    // fpu_spill_mask_ is a contiguous run of bits (see ComputeSpillMask), so a
    // single vpush covers all of them.
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpushs(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // Allocate the remainder of the frame and store R0 (the incoming current
  // method) at [SP, #0] (kCurrentMethodStackOffset).
  __ AddConstant(SP, -(GetFrameSize() - FrameEntrySpillSize()));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
539
// Emits the method epilogue, undoing GenerateFrameEntry in reverse order.
void CodeGeneratorARM::GenerateFrameExit() {
  if (HasEmptyFrame()) {
    // Nothing was pushed at entry; just return.
    __ bx(LR);
    return;
  }
  __ AddConstant(SP, GetFrameSize() - FrameEntrySpillSize());
  if (fpu_spill_mask_ != 0) {
    SRegister start_register = SRegister(LeastSignificantBit(fpu_spill_mask_));
    __ vpops(start_register, POPCOUNT(fpu_spill_mask_));
  }
  // core_spill_mask_ includes PC (see kCoreCalleeSaves and the constructor),
  // so popping it both restores the callee-saves and returns to the caller.
  __ PopList(core_spill_mask_);
}
552
// Binds `block`'s label to the current position in the assembler buffer.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
556
557Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
558  switch (load->GetType()) {
559    case Primitive::kPrimLong:
560    case Primitive::kPrimDouble:
561      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
562      break;
563
564    case Primitive::kPrimInt:
565    case Primitive::kPrimNot:
566    case Primitive::kPrimFloat:
567      return Location::StackSlot(GetStackSlot(load->GetLocal()));
568
569    case Primitive::kPrimBoolean:
570    case Primitive::kPrimByte:
571    case Primitive::kPrimChar:
572    case Primitive::kPrimShort:
573    case Primitive::kPrimVoid:
574      LOG(FATAL) << "Unexpected type " << load->GetType();
575  }
576
577  LOG(FATAL) << "Unreachable";
578  return Location();
579}
580
// Returns the location (register, register pair, or stack slot) of the next
// method argument of type `type`. `gp_index_`, `float_index_` and
// `double_index_` track how many registers of each kind have been handed out;
// `stack_index_` advances for every argument — including register ones — so
// that stack offsets stay in sync with the argument order.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Longs must land in two consecutive registers (see ExpectedPairLayout).
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      if (float_index_ % 2 == 0) {
        // At an even position, advance the float cursor past any registers
        // already claimed by doubles (presumably to avoid back-filling below
        // allocated double pairs — confirm against AAPCS VFP rules).
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at an even S register so the pair maps onto a D register.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        Location result = Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK(ExpectedPairLayout(result));
        return result;
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
655
656Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
657  switch (type) {
658    case Primitive::kPrimBoolean:
659    case Primitive::kPrimByte:
660    case Primitive::kPrimChar:
661    case Primitive::kPrimShort:
662    case Primitive::kPrimInt:
663    case Primitive::kPrimNot: {
664      return Location::RegisterLocation(R0);
665    }
666
667    case Primitive::kPrimFloat: {
668      return Location::FpuRegisterLocation(S0);
669    }
670
671    case Primitive::kPrimLong: {
672      return Location::RegisterPairLocation(R0, R1);
673    }
674
675    case Primitive::kPrimDouble: {
676      return Location::FpuRegisterPairLocation(S0, S1);
677    }
678
679    case Primitive::kPrimVoid:
680      return Location();
681  }
682  UNREACHABLE();
683  return Location();
684}
685
686void CodeGeneratorARM::Move32(Location destination, Location source) {
687  if (source.Equals(destination)) {
688    return;
689  }
690  if (destination.IsRegister()) {
691    if (source.IsRegister()) {
692      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
693    } else if (source.IsFpuRegister()) {
694      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
695    } else {
696      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
697    }
698  } else if (destination.IsFpuRegister()) {
699    if (source.IsRegister()) {
700      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
701    } else if (source.IsFpuRegister()) {
702      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
703    } else {
704      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
705    }
706  } else {
707    DCHECK(destination.IsStackSlot()) << destination;
708    if (source.IsRegister()) {
709      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
710    } else if (source.IsFpuRegister()) {
711      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
712    } else {
713      DCHECK(source.IsStackSlot()) << source;
714      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
715      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
716    }
717  }
718}
719
// Moves a 64-bit value (long or double) between core register pairs, FPU
// register pairs and double stack slots.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The halves of the two pairs may overlap, so let the parallel move
      // resolver sequence (or swap) the two word-sized moves safely.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // The pair-load below relies on the even-low/consecutive layout checked
      // by ExpectedPairLayout.
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1/R2 has an odd low register, so the pair-store form cannot be
        // used (see ExpectedPairLayout); store the two words separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: hand the two word moves to the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}
773
// Moves the value produced by `instruction` into `location`, on behalf of
// `move_for`. Handles constants inline; otherwise dispatches to Move32/Move64
// based on the value's type and where it currently lives.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    // Already in the right place; nothing to emit.
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize the constant directly into the target location.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Go through the IP scratch register for a stack destination.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store the two 32-bit halves through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    // The value lives in the local's stack slot.
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    // The value lives in a temporary's slot; width is encoded in the location.
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    // General case: the value is in the instruction's output location, which
    // must still be live at `move_for`.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
857
// Calls the quick runtime entry point at `entry_point_offset` (an offset off
// the thread register TR) on behalf of `instruction`, recording the native
// PC / `dex_pc` mapping for stack maps.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  // Load the entry point from the current Thread and branch-and-link to it.
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only these instruction kinds (or call-capable locations / non-leaf
  // methods) are expected to reach the runtime through this helper.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}
871
// An unconditional branch needs no register locations.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
875
// Emits an unconditional branch to the successor, folding in a suspend check
// when the goto is a loop back edge (or follows an entry-block suspend check),
// and eliding the branch entirely when the successor is the next block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge of a loop with a suspend check: the suspend check emits the
    // branch to `successor` itself.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // Entry-block suspend check: generate it here (no successor branch folded).
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
897
// The exit block needs no register locations.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
901
// The exit block should never be reached at runtime; in debug builds a
// breakpoint is emitted so falling through traps immediately.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
909
// An HIf only needs an input register when its condition has been
// materialized (or is not an HCondition at all); otherwise the comparison's
// own inputs drive the branch directly.
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
918
// Emits the conditional branch for an HIf. Three cases: a constant condition
// (branch statically resolved), a materialized condition (compare its boolean
// output to 0), or a non-materialized HCondition (re-emit the comparison and
// branch on its condition code). Falls through to the false successor when
// it is the next block.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          // Immediate cannot be encoded in the CMP; go through scratch IP.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is the fall-through block.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
970
971
// Shared location setup for all HCondition comparisons: left in a register,
// right a register or encodable constant, and an output register only when
// the boolean result must be materialized.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
981
982void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
983  if (!comp->NeedsMaterialization()) return;
984  LocationSummary* locations = comp->GetLocations();
985  Register left = locations->InAt(0).AsRegister<Register>();
986
987  if (locations->InAt(1).IsRegister()) {
988    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
989  } else {
990    DCHECK(locations->InAt(1).IsConstant());
991    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
992    ShifterOperand operand;
993    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
994      __ cmp(left, operand);
995    } else {
996      Register temp = IP;
997      __ LoadImmediate(temp, value);
998      __ cmp(left, ShifterOperand(temp));
999    }
1000  }
1001  __ it(ARMCondition(comp->GetCondition()), kItElse);
1002  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
1003         ARMCondition(comp->GetCondition()));
1004  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
1005         ARMOppositeCondition(comp->GetCondition()));
1006}
1007
// HEqual locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1011
// HEqual codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
1015
// HNotEqual locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1019
// HNotEqual codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
1023
// HLessThan locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1027
// HLessThan codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
1031
// HLessThanOrEqual locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1035
// HLessThanOrEqual codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
1039
// HGreaterThan locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1043
// HGreaterThan codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
1047
// HGreaterThanOrEqual locations: handled by the common HCondition path.
void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1051
// HGreaterThanOrEqual codegen: handled by the common HCondition path.
void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
1055
// A local declaration needs no register locations.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
1059
// Locals generate no code; just check they all live in the entry block.
void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
1063
// Loading a local needs no register locations; see CodeGeneratorARM::Move.
void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}
1067
void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
1072
// Constrains the stored value (input 1) to the local's own stack slot —
// single-word for 32-bit types, double-word for long/double — so the store
// becomes a plain stack move.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}
1096
// No code: the location constraints above already place the value in the
// local's stack slot.
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}
1100
// An int constant's output is the constant itself; no register is reserved.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1106
void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1111
// A long constant's output is the constant itself; no registers are reserved.
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1117
void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1122
// A float constant's output is the constant itself; no register is reserved.
void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1128
void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1133
// A double constant's output is the constant itself; no registers are reserved.
void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
1139
void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1144
// A void return needs no register locations.
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1148
// Tears down the frame and returns; there is no value to place.
void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1153
// Pins the returned value to the ABI return location for its type, so no
// move is needed at the return itself.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}
1159
// The value is already in the ABI return location; just exit the frame.
void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}
1164
// Tries the intrinsic location builder first; if the invoke is recognized as
// an intrinsic, its locations are set there and the generic call setup is
// skipped.
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1174
// Loads the current method into `reg` from its spill slot at
// SP + kCurrentMethodStackOffset (0).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  DCHECK(RequiresCurrentMethod());
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
1179
1180static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen) {
1181  if (invoke->GetLocations()->Intrinsified()) {
1182    IntrinsicCodeGeneratorARM intrinsic(codegen);
1183    intrinsic.Dispatch(invoke);
1184    return true;
1185  }
1186  return false;
1187}
1188
// Emits an intrinsic if one was set up; otherwise performs a regular static
// or direct call using temp 0 for the callee method pointer.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  codegen_->GenerateStaticOrDirectCall(invoke, temp);
}
1198
// Generic call setup shared by all invoke kinds: marks the summary as a call,
// reserves R0 as a temp (for the callee method), places each argument per the
// dex calling convention, and pins the output to the ABI return location.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
}
1212
// Tries the intrinsic location builder first; falls back to the generic call
// setup when the invoke is not a recognized intrinsic.
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
                                         codegen_->GetInstructionSetFeatures());
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
1222
// Emits a virtual call: load the receiver's class, index into the embedded
// vtable by the invoke's vtable index, then call through the method's quick
// entry point. Intrinsified invokes are handled up front.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1253
// Interface calls use the generic call setup plus an extra temp (R12) for the
// hidden argument carrying the dex method index.
void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}
1259
// Emits an interface call: set the hidden dex-method-index argument, load the
// receiver's class, index into the embedded IMT (modulo kImtSize), then call
// through the resolved method's quick entry point.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1292
// Location constraints for arithmetic negation. The long case requires the
// output to overlap-free pairs is not possible: the codegen writes out.lo
// before reading in.hi, so kOutputOverlap is required there.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1318
// Emits arithmetic negation. Int uses a single RSB; long uses the
// RSBS/SBC/SUB sequence explained inline (Thumb-2 has no RSC); float and
// double use the VFP negate instructions.
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1365
1366void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1367  Primitive::Type result_type = conversion->GetResultType();
1368  Primitive::Type input_type = conversion->GetInputType();
1369  DCHECK_NE(result_type, input_type);
1370
1371  // The float-to-long and double-to-long type conversions rely on a
1372  // call to the runtime.
1373  LocationSummary::CallKind call_kind =
1374      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1375       && result_type == Primitive::kPrimLong)
1376      ? LocationSummary::kCall
1377      : LocationSummary::kNoCall;
1378  LocationSummary* locations =
1379      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1380
1381  switch (result_type) {
1382    case Primitive::kPrimByte:
1383      switch (input_type) {
1384        case Primitive::kPrimShort:
1385        case Primitive::kPrimInt:
1386        case Primitive::kPrimChar:
1387          // Processing a Dex `int-to-byte' instruction.
1388          locations->SetInAt(0, Location::RequiresRegister());
1389          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1390          break;
1391
1392        default:
1393          LOG(FATAL) << "Unexpected type conversion from " << input_type
1394                     << " to " << result_type;
1395      }
1396      break;
1397
1398    case Primitive::kPrimShort:
1399      switch (input_type) {
1400        case Primitive::kPrimByte:
1401        case Primitive::kPrimInt:
1402        case Primitive::kPrimChar:
1403          // Processing a Dex `int-to-short' instruction.
1404          locations->SetInAt(0, Location::RequiresRegister());
1405          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1406          break;
1407
1408        default:
1409          LOG(FATAL) << "Unexpected type conversion from " << input_type
1410                     << " to " << result_type;
1411      }
1412      break;
1413
1414    case Primitive::kPrimInt:
1415      switch (input_type) {
1416        case Primitive::kPrimLong:
1417          // Processing a Dex `long-to-int' instruction.
1418          locations->SetInAt(0, Location::Any());
1419          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1420          break;
1421
1422        case Primitive::kPrimFloat:
1423          // Processing a Dex `float-to-int' instruction.
1424          locations->SetInAt(0, Location::RequiresFpuRegister());
1425          locations->SetOut(Location::RequiresRegister());
1426          locations->AddTemp(Location::RequiresFpuRegister());
1427          break;
1428
1429        case Primitive::kPrimDouble:
1430          // Processing a Dex `double-to-int' instruction.
1431          locations->SetInAt(0, Location::RequiresFpuRegister());
1432          locations->SetOut(Location::RequiresRegister());
1433          locations->AddTemp(Location::RequiresFpuRegister());
1434          break;
1435
1436        default:
1437          LOG(FATAL) << "Unexpected type conversion from " << input_type
1438                     << " to " << result_type;
1439      }
1440      break;
1441
1442    case Primitive::kPrimLong:
1443      switch (input_type) {
1444        case Primitive::kPrimByte:
1445        case Primitive::kPrimShort:
1446        case Primitive::kPrimInt:
1447        case Primitive::kPrimChar:
1448          // Processing a Dex `int-to-long' instruction.
1449          locations->SetInAt(0, Location::RequiresRegister());
1450          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1451          break;
1452
1453        case Primitive::kPrimFloat: {
1454          // Processing a Dex `float-to-long' instruction.
1455          InvokeRuntimeCallingConvention calling_convention;
1456          locations->SetInAt(0, Location::FpuRegisterLocation(
1457              calling_convention.GetFpuRegisterAt(0)));
1458          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1459          break;
1460        }
1461
1462        case Primitive::kPrimDouble: {
1463          // Processing a Dex `double-to-long' instruction.
1464          InvokeRuntimeCallingConvention calling_convention;
1465          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1466              calling_convention.GetFpuRegisterAt(0),
1467              calling_convention.GetFpuRegisterAt(1)));
1468          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1469          break;
1470        }
1471
1472        default:
1473          LOG(FATAL) << "Unexpected type conversion from " << input_type
1474                     << " to " << result_type;
1475      }
1476      break;
1477
1478    case Primitive::kPrimChar:
1479      switch (input_type) {
1480        case Primitive::kPrimByte:
1481        case Primitive::kPrimShort:
1482        case Primitive::kPrimInt:
1483          // Processing a Dex `int-to-char' instruction.
1484          locations->SetInAt(0, Location::RequiresRegister());
1485          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1486          break;
1487
1488        default:
1489          LOG(FATAL) << "Unexpected type conversion from " << input_type
1490                     << " to " << result_type;
1491      }
1492      break;
1493
1494    case Primitive::kPrimFloat:
1495      switch (input_type) {
1496        case Primitive::kPrimByte:
1497        case Primitive::kPrimShort:
1498        case Primitive::kPrimInt:
1499        case Primitive::kPrimChar:
1500          // Processing a Dex `int-to-float' instruction.
1501          locations->SetInAt(0, Location::RequiresRegister());
1502          locations->SetOut(Location::RequiresFpuRegister());
1503          break;
1504
1505        case Primitive::kPrimLong:
1506          // Processing a Dex `long-to-float' instruction.
1507          locations->SetInAt(0, Location::RequiresRegister());
1508          locations->SetOut(Location::RequiresFpuRegister());
1509          locations->AddTemp(Location::RequiresRegister());
1510          locations->AddTemp(Location::RequiresRegister());
1511          locations->AddTemp(Location::RequiresFpuRegister());
1512          locations->AddTemp(Location::RequiresFpuRegister());
1513          break;
1514
1515        case Primitive::kPrimDouble:
1516          // Processing a Dex `double-to-float' instruction.
1517          locations->SetInAt(0, Location::RequiresFpuRegister());
1518          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1519          break;
1520
1521        default:
1522          LOG(FATAL) << "Unexpected type conversion from " << input_type
1523                     << " to " << result_type;
1524      };
1525      break;
1526
1527    case Primitive::kPrimDouble:
1528      switch (input_type) {
1529        case Primitive::kPrimByte:
1530        case Primitive::kPrimShort:
1531        case Primitive::kPrimInt:
1532        case Primitive::kPrimChar:
1533          // Processing a Dex `int-to-double' instruction.
1534          locations->SetInAt(0, Location::RequiresRegister());
1535          locations->SetOut(Location::RequiresFpuRegister());
1536          break;
1537
1538        case Primitive::kPrimLong:
1539          // Processing a Dex `long-to-double' instruction.
1540          locations->SetInAt(0, Location::RequiresRegister());
1541          locations->SetOut(Location::RequiresFpuRegister());
1542          locations->AddTemp(Location::RequiresRegister());
1543          locations->AddTemp(Location::RequiresRegister());
1544          locations->AddTemp(Location::RequiresFpuRegister());
1545          break;
1546
1547        case Primitive::kPrimFloat:
1548          // Processing a Dex `float-to-double' instruction.
1549          locations->SetInAt(0, Location::RequiresFpuRegister());
1550          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1551          break;
1552
1553        default:
1554          LOG(FATAL) << "Unexpected type conversion from " << input_type
1555                     << " to " << result_type;
1556      };
1557      break;
1558
1559    default:
1560      LOG(FATAL) << "Unexpected type conversion from " << input_type
1561                 << " to " << result_type;
1562  }
1563}
1564
1565void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1566  LocationSummary* locations = conversion->GetLocations();
1567  Location out = locations->Out();
1568  Location in = locations->InAt(0);
1569  Primitive::Type result_type = conversion->GetResultType();
1570  Primitive::Type input_type = conversion->GetInputType();
1571  DCHECK_NE(result_type, input_type);
1572  switch (result_type) {
1573    case Primitive::kPrimByte:
1574      switch (input_type) {
1575        case Primitive::kPrimShort:
1576        case Primitive::kPrimInt:
1577        case Primitive::kPrimChar:
1578          // Processing a Dex `int-to-byte' instruction.
1579          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1580          break;
1581
1582        default:
1583          LOG(FATAL) << "Unexpected type conversion from " << input_type
1584                     << " to " << result_type;
1585      }
1586      break;
1587
1588    case Primitive::kPrimShort:
1589      switch (input_type) {
1590        case Primitive::kPrimByte:
1591        case Primitive::kPrimInt:
1592        case Primitive::kPrimChar:
1593          // Processing a Dex `int-to-short' instruction.
1594          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1595          break;
1596
1597        default:
1598          LOG(FATAL) << "Unexpected type conversion from " << input_type
1599                     << " to " << result_type;
1600      }
1601      break;
1602
1603    case Primitive::kPrimInt:
1604      switch (input_type) {
1605        case Primitive::kPrimLong:
1606          // Processing a Dex `long-to-int' instruction.
1607          DCHECK(out.IsRegister());
1608          if (in.IsRegisterPair()) {
1609            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1610          } else if (in.IsDoubleStackSlot()) {
1611            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1612          } else {
1613            DCHECK(in.IsConstant());
1614            DCHECK(in.GetConstant()->IsLongConstant());
1615            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1616            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1617          }
1618          break;
1619
1620        case Primitive::kPrimFloat: {
1621          // Processing a Dex `float-to-int' instruction.
1622          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1623          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1624          __ vcvtis(temp, temp);
1625          __ vmovrs(out.AsRegister<Register>(), temp);
1626          break;
1627        }
1628
1629        case Primitive::kPrimDouble: {
1630          // Processing a Dex `double-to-int' instruction.
1631          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1632          DRegister temp_d = FromLowSToD(temp_s);
1633          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1634          __ vcvtid(temp_s, temp_d);
1635          __ vmovrs(out.AsRegister<Register>(), temp_s);
1636          break;
1637        }
1638
1639        default:
1640          LOG(FATAL) << "Unexpected type conversion from " << input_type
1641                     << " to " << result_type;
1642      }
1643      break;
1644
1645    case Primitive::kPrimLong:
1646      switch (input_type) {
1647        case Primitive::kPrimByte:
1648        case Primitive::kPrimShort:
1649        case Primitive::kPrimInt:
1650        case Primitive::kPrimChar:
1651          // Processing a Dex `int-to-long' instruction.
1652          DCHECK(out.IsRegisterPair());
1653          DCHECK(in.IsRegister());
1654          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1655          // Sign extension.
1656          __ Asr(out.AsRegisterPairHigh<Register>(),
1657                 out.AsRegisterPairLow<Register>(),
1658                 31);
1659          break;
1660
1661        case Primitive::kPrimFloat:
1662          // Processing a Dex `float-to-long' instruction.
1663          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1664                                  conversion,
1665                                  conversion->GetDexPc());
1666          break;
1667
1668        case Primitive::kPrimDouble:
1669          // Processing a Dex `double-to-long' instruction.
1670          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1671                                  conversion,
1672                                  conversion->GetDexPc());
1673          break;
1674
1675        default:
1676          LOG(FATAL) << "Unexpected type conversion from " << input_type
1677                     << " to " << result_type;
1678      }
1679      break;
1680
1681    case Primitive::kPrimChar:
1682      switch (input_type) {
1683        case Primitive::kPrimByte:
1684        case Primitive::kPrimShort:
1685        case Primitive::kPrimInt:
1686          // Processing a Dex `int-to-char' instruction.
1687          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1688          break;
1689
1690        default:
1691          LOG(FATAL) << "Unexpected type conversion from " << input_type
1692                     << " to " << result_type;
1693      }
1694      break;
1695
1696    case Primitive::kPrimFloat:
1697      switch (input_type) {
1698        case Primitive::kPrimByte:
1699        case Primitive::kPrimShort:
1700        case Primitive::kPrimInt:
1701        case Primitive::kPrimChar: {
1702          // Processing a Dex `int-to-float' instruction.
1703          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1704          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1705          break;
1706        }
1707
1708        case Primitive::kPrimLong: {
1709          // Processing a Dex `long-to-float' instruction.
1710          Register low = in.AsRegisterPairLow<Register>();
1711          Register high = in.AsRegisterPairHigh<Register>();
1712          SRegister output = out.AsFpuRegister<SRegister>();
1713          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1714          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1715          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1716          DRegister temp1_d = FromLowSToD(temp1_s);
1717          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1718          DRegister temp2_d = FromLowSToD(temp2_s);
1719
1720          // Operations use doubles for precision reasons (each 32-bit
1721          // half of a long fits in the 53-bit mantissa of a double,
1722          // but not in the 24-bit mantissa of a float).  This is
1723          // especially important for the low bits.  The result is
1724          // eventually converted to float.
1725
1726          // temp1_d = int-to-double(high)
1727          __ vmovsr(temp1_s, high);
1728          __ vcvtdi(temp1_d, temp1_s);
1729          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1730          // as an immediate value into `temp2_d` does not work, as
1731          // this instruction only transfers 8 significant bits of its
1732          // immediate operand.  Instead, use two 32-bit core
1733          // registers to load `k2Pow32EncodingForDouble` into
1734          // `temp2_d`.
1735          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1736          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1737          __ vmovdrr(temp2_d, constant_low, constant_high);
1738          // temp1_d = temp1_d * 2^32
1739          __ vmuld(temp1_d, temp1_d, temp2_d);
1740          // temp2_d = unsigned-to-double(low)
1741          __ vmovsr(temp2_s, low);
1742          __ vcvtdu(temp2_d, temp2_s);
1743          // temp1_d = temp1_d + temp2_d
1744          __ vaddd(temp1_d, temp1_d, temp2_d);
1745          // output = double-to-float(temp1_d);
1746          __ vcvtsd(output, temp1_d);
1747          break;
1748        }
1749
1750        case Primitive::kPrimDouble:
1751          // Processing a Dex `double-to-float' instruction.
1752          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1753                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1754          break;
1755
1756        default:
1757          LOG(FATAL) << "Unexpected type conversion from " << input_type
1758                     << " to " << result_type;
1759      };
1760      break;
1761
1762    case Primitive::kPrimDouble:
1763      switch (input_type) {
1764        case Primitive::kPrimByte:
1765        case Primitive::kPrimShort:
1766        case Primitive::kPrimInt:
1767        case Primitive::kPrimChar: {
1768          // Processing a Dex `int-to-double' instruction.
1769          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1770          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1771                    out.AsFpuRegisterPairLow<SRegister>());
1772          break;
1773        }
1774
1775        case Primitive::kPrimLong: {
1776          // Processing a Dex `long-to-double' instruction.
1777          Register low = in.AsRegisterPairLow<Register>();
1778          Register high = in.AsRegisterPairHigh<Register>();
1779          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1780          DRegister out_d = FromLowSToD(out_s);
1781          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1782          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1783          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1784          DRegister temp_d = FromLowSToD(temp_s);
1785
1786          // out_d = int-to-double(high)
1787          __ vmovsr(out_s, high);
1788          __ vcvtdi(out_d, out_s);
1789          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1790          // as an immediate value into `temp_d` does not work, as
1791          // this instruction only transfers 8 significant bits of its
1792          // immediate operand.  Instead, use two 32-bit core
1793          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1794          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1795          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1796          __ vmovdrr(temp_d, constant_low, constant_high);
1797          // out_d = out_d * 2^32
1798          __ vmuld(out_d, out_d, temp_d);
1799          // temp_d = unsigned-to-double(low)
1800          __ vmovsr(temp_s, low);
1801          __ vcvtdu(temp_d, temp_s);
1802          // out_d = out_d + temp_d
1803          __ vaddd(out_d, out_d, temp_d);
1804          break;
1805        }
1806
1807        case Primitive::kPrimFloat:
1808          // Processing a Dex `float-to-double' instruction.
1809          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1810                    in.AsFpuRegister<SRegister>());
1811          break;
1812
1813        default:
1814          LOG(FATAL) << "Unexpected type conversion from " << input_type
1815                     << " to " << result_type;
1816      };
1817      break;
1818
1819    default:
1820      LOG(FATAL) << "Unexpected type conversion from " << input_type
1821                 << " to " << result_type;
1822  }
1823}
1824
1825void LocationsBuilderARM::VisitAdd(HAdd* add) {
1826  LocationSummary* locations =
1827      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1828  switch (add->GetResultType()) {
1829    case Primitive::kPrimInt: {
1830      locations->SetInAt(0, Location::RequiresRegister());
1831      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1832      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1833      break;
1834    }
1835
1836    case Primitive::kPrimLong: {
1837      locations->SetInAt(0, Location::RequiresRegister());
1838      locations->SetInAt(1, Location::RequiresRegister());
1839      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1840      break;
1841    }
1842
1843    case Primitive::kPrimFloat:
1844    case Primitive::kPrimDouble: {
1845      locations->SetInAt(0, Location::RequiresFpuRegister());
1846      locations->SetInAt(1, Location::RequiresFpuRegister());
1847      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1848      break;
1849    }
1850
1851    default:
1852      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1853  }
1854}
1855
1856void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1857  LocationSummary* locations = add->GetLocations();
1858  Location out = locations->Out();
1859  Location first = locations->InAt(0);
1860  Location second = locations->InAt(1);
1861  switch (add->GetResultType()) {
1862    case Primitive::kPrimInt:
1863      if (second.IsRegister()) {
1864        __ add(out.AsRegister<Register>(),
1865               first.AsRegister<Register>(),
1866               ShifterOperand(second.AsRegister<Register>()));
1867      } else {
1868        __ AddConstant(out.AsRegister<Register>(),
1869                       first.AsRegister<Register>(),
1870                       second.GetConstant()->AsIntConstant()->GetValue());
1871      }
1872      break;
1873
1874    case Primitive::kPrimLong: {
1875      DCHECK(second.IsRegisterPair());
1876      __ adds(out.AsRegisterPairLow<Register>(),
1877              first.AsRegisterPairLow<Register>(),
1878              ShifterOperand(second.AsRegisterPairLow<Register>()));
1879      __ adc(out.AsRegisterPairHigh<Register>(),
1880             first.AsRegisterPairHigh<Register>(),
1881             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1882      break;
1883    }
1884
1885    case Primitive::kPrimFloat:
1886      __ vadds(out.AsFpuRegister<SRegister>(),
1887               first.AsFpuRegister<SRegister>(),
1888               second.AsFpuRegister<SRegister>());
1889      break;
1890
1891    case Primitive::kPrimDouble:
1892      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1893               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1894               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1895      break;
1896
1897    default:
1898      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1899  }
1900}
1901
1902void LocationsBuilderARM::VisitSub(HSub* sub) {
1903  LocationSummary* locations =
1904      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1905  switch (sub->GetResultType()) {
1906    case Primitive::kPrimInt: {
1907      locations->SetInAt(0, Location::RequiresRegister());
1908      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1909      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1910      break;
1911    }
1912
1913    case Primitive::kPrimLong: {
1914      locations->SetInAt(0, Location::RequiresRegister());
1915      locations->SetInAt(1, Location::RequiresRegister());
1916      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1917      break;
1918    }
1919    case Primitive::kPrimFloat:
1920    case Primitive::kPrimDouble: {
1921      locations->SetInAt(0, Location::RequiresFpuRegister());
1922      locations->SetInAt(1, Location::RequiresFpuRegister());
1923      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1924      break;
1925    }
1926    default:
1927      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1928  }
1929}
1930
1931void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1932  LocationSummary* locations = sub->GetLocations();
1933  Location out = locations->Out();
1934  Location first = locations->InAt(0);
1935  Location second = locations->InAt(1);
1936  switch (sub->GetResultType()) {
1937    case Primitive::kPrimInt: {
1938      if (second.IsRegister()) {
1939        __ sub(out.AsRegister<Register>(),
1940               first.AsRegister<Register>(),
1941               ShifterOperand(second.AsRegister<Register>()));
1942      } else {
1943        __ AddConstant(out.AsRegister<Register>(),
1944                       first.AsRegister<Register>(),
1945                       -second.GetConstant()->AsIntConstant()->GetValue());
1946      }
1947      break;
1948    }
1949
1950    case Primitive::kPrimLong: {
1951      DCHECK(second.IsRegisterPair());
1952      __ subs(out.AsRegisterPairLow<Register>(),
1953              first.AsRegisterPairLow<Register>(),
1954              ShifterOperand(second.AsRegisterPairLow<Register>()));
1955      __ sbc(out.AsRegisterPairHigh<Register>(),
1956             first.AsRegisterPairHigh<Register>(),
1957             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1958      break;
1959    }
1960
1961    case Primitive::kPrimFloat: {
1962      __ vsubs(out.AsFpuRegister<SRegister>(),
1963               first.AsFpuRegister<SRegister>(),
1964               second.AsFpuRegister<SRegister>());
1965      break;
1966    }
1967
1968    case Primitive::kPrimDouble: {
1969      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1970               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1971               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1972      break;
1973    }
1974
1975
1976    default:
1977      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1978  }
1979}
1980
1981void LocationsBuilderARM::VisitMul(HMul* mul) {
1982  LocationSummary* locations =
1983      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1984  switch (mul->GetResultType()) {
1985    case Primitive::kPrimInt:
1986    case Primitive::kPrimLong:  {
1987      locations->SetInAt(0, Location::RequiresRegister());
1988      locations->SetInAt(1, Location::RequiresRegister());
1989      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1990      break;
1991    }
1992
1993    case Primitive::kPrimFloat:
1994    case Primitive::kPrimDouble: {
1995      locations->SetInAt(0, Location::RequiresFpuRegister());
1996      locations->SetInAt(1, Location::RequiresFpuRegister());
1997      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1998      break;
1999    }
2000
2001    default:
2002      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2003  }
2004}
2005
// Emits code for HMul. The 64-bit case synthesizes a long multiply out of
// 32-bit mul/mla/umull; the instruction order and register constraints
// below are load-bearing, see the DCHECKs.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      // 32-bit multiply is a single instruction.
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2); the first two
      // instructions below would then clobber an input before it is read.
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      // the unsigned full multiply also leaves the upper 32 bits in IP.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
2067
2068void LocationsBuilderARM::VisitDiv(HDiv* div) {
2069  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2070      ? LocationSummary::kCall
2071      : LocationSummary::kNoCall;
2072  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2073
2074  switch (div->GetResultType()) {
2075    case Primitive::kPrimInt: {
2076      locations->SetInAt(0, Location::RequiresRegister());
2077      locations->SetInAt(1, Location::RequiresRegister());
2078      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2079      break;
2080    }
2081    case Primitive::kPrimLong: {
2082      InvokeRuntimeCallingConvention calling_convention;
2083      locations->SetInAt(0, Location::RegisterPairLocation(
2084          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2085      locations->SetInAt(1, Location::RegisterPairLocation(
2086          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2087      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2088      break;
2089    }
2090    case Primitive::kPrimFloat:
2091    case Primitive::kPrimDouble: {
2092      locations->SetInAt(0, Location::RequiresFpuRegister());
2093      locations->SetInAt(1, Location::RequiresFpuRegister());
2094      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2095      break;
2096    }
2097
2098    default:
2099      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2100  }
2101}
2102
2103void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2104  LocationSummary* locations = div->GetLocations();
2105  Location out = locations->Out();
2106  Location first = locations->InAt(0);
2107  Location second = locations->InAt(1);
2108
2109  switch (div->GetResultType()) {
2110    case Primitive::kPrimInt: {
2111      __ sdiv(out.AsRegister<Register>(),
2112              first.AsRegister<Register>(),
2113              second.AsRegister<Register>());
2114      break;
2115    }
2116
2117    case Primitive::kPrimLong: {
2118      InvokeRuntimeCallingConvention calling_convention;
2119      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2120      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2121      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2122      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2123      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2124      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2125
2126      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2127      break;
2128    }
2129
2130    case Primitive::kPrimFloat: {
2131      __ vdivs(out.AsFpuRegister<SRegister>(),
2132               first.AsFpuRegister<SRegister>(),
2133               second.AsFpuRegister<SRegister>());
2134      break;
2135    }
2136
2137    case Primitive::kPrimDouble: {
2138      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2139               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2140               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2141      break;
2142    }
2143
2144    default:
2145      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2146  }
2147}
2148
2149void LocationsBuilderARM::VisitRem(HRem* rem) {
2150  Primitive::Type type = rem->GetResultType();
2151  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2152      ? LocationSummary::kNoCall
2153      : LocationSummary::kCall;
2154  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2155
2156  switch (type) {
2157    case Primitive::kPrimInt: {
2158      locations->SetInAt(0, Location::RequiresRegister());
2159      locations->SetInAt(1, Location::RequiresRegister());
2160      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2161      locations->AddTemp(Location::RequiresRegister());
2162      break;
2163    }
2164    case Primitive::kPrimLong: {
2165      InvokeRuntimeCallingConvention calling_convention;
2166      locations->SetInAt(0, Location::RegisterPairLocation(
2167          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2168      locations->SetInAt(1, Location::RegisterPairLocation(
2169          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2170      // The runtime helper puts the output in R2,R3.
2171      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2172      break;
2173    }
2174    case Primitive::kPrimFloat: {
2175      InvokeRuntimeCallingConvention calling_convention;
2176      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2177      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2178      locations->SetOut(Location::FpuRegisterLocation(S0));
2179      break;
2180    }
2181
2182    case Primitive::kPrimDouble: {
2183      InvokeRuntimeCallingConvention calling_convention;
2184      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2185          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2186      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2187          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2188      locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1));
2189      break;
2190    }
2191
2192    default:
2193      LOG(FATAL) << "Unexpected rem type " << type;
2194  }
2195}
2196
2197void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2198  LocationSummary* locations = rem->GetLocations();
2199  Location out = locations->Out();
2200  Location first = locations->InAt(0);
2201  Location second = locations->InAt(1);
2202
2203  Primitive::Type type = rem->GetResultType();
2204  switch (type) {
2205    case Primitive::kPrimInt: {
2206      Register reg1 = first.AsRegister<Register>();
2207      Register reg2 = second.AsRegister<Register>();
2208      Register temp = locations->GetTemp(0).AsRegister<Register>();
2209
2210      // temp = reg1 / reg2  (integer division)
2211      // temp = temp * reg2
2212      // dest = reg1 - temp
2213      __ sdiv(temp, reg1, reg2);
2214      __ mul(temp, temp, reg2);
2215      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2216      break;
2217    }
2218
2219    case Primitive::kPrimLong: {
2220      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2221      break;
2222    }
2223
2224    case Primitive::kPrimFloat: {
2225      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2226      break;
2227    }
2228
2229    case Primitive::kPrimDouble: {
2230      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2231      break;
2232    }
2233
2234    default:
2235      LOG(FATAL) << "Unexpected rem type " << type;
2236  }
2237}
2238
// Sets up locations for HDivZeroCheck. The divisor may be a constant, in
// which case the code generator can decide the check statically.
void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    // The check forwards its input unchanged to its users.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
2247
2248void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2249  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2250  codegen_->AddSlowPath(slow_path);
2251
2252  LocationSummary* locations = instruction->GetLocations();
2253  Location value = locations->InAt(0);
2254
2255  switch (instruction->GetType()) {
2256    case Primitive::kPrimInt: {
2257      if (value.IsRegister()) {
2258        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2259        __ b(slow_path->GetEntryLabel(), EQ);
2260      } else {
2261        DCHECK(value.IsConstant()) << value;
2262        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2263          __ b(slow_path->GetEntryLabel());
2264        }
2265      }
2266      break;
2267    }
2268    case Primitive::kPrimLong: {
2269      if (value.IsRegisterPair()) {
2270        __ orrs(IP,
2271                value.AsRegisterPairLow<Register>(),
2272                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2273        __ b(slow_path->GetEntryLabel(), EQ);
2274      } else {
2275        DCHECK(value.IsConstant()) << value;
2276        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2277          __ b(slow_path->GetEntryLabel());
2278        }
2279      }
2280      break;
2281    default:
2282      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2283    }
2284  }
2285}
2286
2287void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2288  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2289
2290  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2291      ? LocationSummary::kCall
2292      : LocationSummary::kNoCall;
2293  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2294
2295  switch (op->GetResultType()) {
2296    case Primitive::kPrimInt: {
2297      locations->SetInAt(0, Location::RequiresRegister());
2298      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2299      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2300      break;
2301    }
2302    case Primitive::kPrimLong: {
2303      InvokeRuntimeCallingConvention calling_convention;
2304      locations->SetInAt(0, Location::RegisterPairLocation(
2305          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2306      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2307      // The runtime helper puts the output in R0,R1.
2308      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2309      break;
2310    }
2311    default:
2312      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2313  }
2314}
2315
// Shared code generation for HShl, HShr and HUShr. Int shifts are emitted
// inline (masking the shift count to the Java-defined range first); long
// shifts branch to a quick-runtime helper.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): the mask is applied in place, clobbering input 1's
        // register — presumably acceptable to the register allocator here;
        // confirm no later use of the unmasked value.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // Verify the register allocator honored the runtime calling convention.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());

      // Select the helper matching the shift kind.
      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // NOTE(review): this calls the helper directly rather than via
      // codegen_->InvokeRuntime() (no pc-info recording) — presumably fine
      // because shift helpers cannot throw; confirm against other call sites.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
2380
void LocationsBuilderARM::VisitShl(HShl* shl) {
  // All shift kinds (shl/shr/ushr) share a single locations path.
  HandleShift(shl);
}
2384
void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  // All shift kinds (shl/shr/ushr) share a single code-generation path.
  HandleShift(shl);
}
2388
void LocationsBuilderARM::VisitShr(HShr* shr) {
  // All shift kinds (shl/shr/ushr) share a single locations path.
  HandleShift(shr);
}
2392
void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  // All shift kinds (shl/shr/ushr) share a single code-generation path.
  HandleShift(shr);
}
2396
void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  // All shift kinds (shl/shr/ushr) share a single locations path.
  HandleShift(ushr);
}
2400
void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  // All shift kinds (shl/shr/ushr) share a single code-generation path.
  HandleShift(ushr);
}
2404
2405void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2406  LocationSummary* locations =
2407      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2408  InvokeRuntimeCallingConvention calling_convention;
2409  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2410  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2411  locations->SetOut(Location::RegisterLocation(R0));
2412}
2413
2414void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
2415  InvokeRuntimeCallingConvention calling_convention;
2416  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2417  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2418  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
2419                          instruction,
2420                          instruction->GetDexPc());
2421}
2422
2423void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2424  LocationSummary* locations =
2425      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2426  InvokeRuntimeCallingConvention calling_convention;
2427  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2428  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2429  locations->SetOut(Location::RegisterLocation(R0));
2430  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2431}
2432
2433void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
2434  InvokeRuntimeCallingConvention calling_convention;
2435  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
2436  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2437  codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
2438                          instruction,
2439                          instruction->GetDexPc());
2440}
2441
2442void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2443  LocationSummary* locations =
2444      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2445  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2446  if (location.IsStackSlot()) {
2447    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2448  } else if (location.IsDoubleStackSlot()) {
2449    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2450  }
2451  locations->SetOut(location);
2452}
2453
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  // UNUSED silences the unused-parameter warning for this intentional no-op.
  UNUSED(instruction);
}
2458
2459void LocationsBuilderARM::VisitNot(HNot* not_) {
2460  LocationSummary* locations =
2461      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2462  locations->SetInAt(0, Location::RequiresRegister());
2463  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2464}
2465
// Emits bitwise not: MVN for ints, a pair of MVNs (low, high) for longs.
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      // Invert both halves of the register pair independently.
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2486
2487void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2488  LocationSummary* locations =
2489      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2490  switch (compare->InputAt(0)->GetType()) {
2491    case Primitive::kPrimLong: {
2492      locations->SetInAt(0, Location::RequiresRegister());
2493      locations->SetInAt(1, Location::RequiresRegister());
2494      // Output overlaps because it is written before doing the low comparison.
2495      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2496      break;
2497    }
2498    case Primitive::kPrimFloat:
2499    case Primitive::kPrimDouble: {
2500      locations->SetInAt(0, Location::RequiresFpuRegister());
2501      locations->SetInAt(1, Location::RequiresFpuRegister());
2502      locations->SetOut(Location::RequiresRegister());
2503      break;
2504    }
2505    default:
2506      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2507  }
2508}
2509
// Emits a three-way comparison producing -1, 0 or 1 in `out`.
// The instruction order around the `cmp`s is significant: see the
// LoadImmediate comments below.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare high words signed first; only if they are equal does the
      // (unsigned) low-word comparison below decide the result.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Load 0 before vcmp for the same flag-preservation reason as above.
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN operands: the gt/lt bias of the HCompare decides the result.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: flags from the last cmp/vcmp select equal/less/greater.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
2558
2559void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2560  LocationSummary* locations =
2561      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2562  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2563    locations->SetInAt(i, Location::Any());
2564  }
2565  locations->SetOut(Location::Any());
2566}
2567
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis generate no code; reaching this visitor is a compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2572
// Emits a DMB with the option matching the requested barrier kind.
void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
  // TODO (ported from quick): revisit Arm barrier kinds
  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
  switch (kind) {
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kAnyAny: {
      // Full inner-shareable barrier for all load-involving kinds.
      flavour = DmbOptions::ISH;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      // Store-store ordering only needs the store variant.
      flavour = DmbOptions::ISHST;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ dmb(flavour);
}
2592
// Emits a 64-bit load of [addr + offset] into out_lo/out_hi using LDREXD.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    // ldrexd takes no immediate offset: fold it into IP. out_lo is free to
    // use as a scratch register here since it is overwritten below anyway.
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}
2604
// Emits a 64-bit store of value_lo/value_hi to [addr + offset] using an
// LDREXD/STREXD loop that retries until the exclusive store succeeds.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2,
                                                          HInstruction* instruction) {
  Label fail;
  if (offset != 0) {
    // strexd takes no immediate offset: fold it into IP via temp1.
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction must
  // be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  __ strexd(temp1, value_lo, value_hi, addr);
  // strexd writes 0 to temp1 on success; retry on failure.
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}
2627
// Builds locations for instance/static field stores. Temp allocation order
// matters: codegen indexes GetTemp(0..3) with matching expectations.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  // Wide volatile stores need the ldrexd/strexd sequence when the CPU has no
  // single-copy-atomic ldrd/strd.
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}
2664
// Emits an instance/static field store, including the volatile memory
// barriers (any-store before, any-any after), the wide-atomic path for
// volatile 64-bit fields, and the GC write barrier for reference stores.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Order all prior stores before this volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic strd available: use the ldrexd/strexd loop.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Copy the double into two core registers, then store them with the
        // ldrexd/strexd loop (temps 2 and 3 are the loop's scratch pair).
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>(),
                                instruction);
      } else {
        __ StoreDToOffset(value_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Longs and doubles are handled in the switch.
  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object now holding a (possibly) new reference.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
  }

  if (is_volatile) {
    // Order this volatile store before any subsequent access.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
2762
// Builds locations for instance/static field loads; adds the temps needed
// by the wide-atomic path for volatile doubles without atomic ldrd.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  bool volatile_for_double = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  // Volatile long loads may go through ldrexd, whose output pair must not
  // alias the base register; force the output to overlap in that case.
  bool overlap = field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(),
                    (overlap ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (volatile_for_double) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
2786
// Emits an instance/static field load, including the wide-atomic path for
// volatile 64-bit fields and the trailing load-any barrier for volatiles.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // No single-copy-atomic ldrd available: use ldrexd.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load into two core temps atomically, then move into the D register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  // Doubles are handled in the switch.
  if (field_type != Primitive::kPrimDouble) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    // Order this volatile load before subsequent loads and stores.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
2871
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field stores share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2875
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field stores share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2879
void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2883
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2887
void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field loads share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2891
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Instance and static field loads share HandleFieldGet.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2895
void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field stores share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2899
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Instance and static field stores share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2903
2904void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2905  LocationSummary* locations =
2906      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2907  locations->SetInAt(0, Location::RequiresRegister());
2908  if (instruction->HasUses()) {
2909    locations->SetOut(Location::SameAsFirstInput());
2910  }
2911}
2912
// Implicit null check: load through the object so a null faults, and record
// the PC so the fault handler can map it back to this instruction.
void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (codegen_->CanMoveNullCheckToUser(instruction)) {
    // A user instruction will perform the dereference itself; emit nothing.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2922
// Explicit null check: compare against null and branch to a slow path that
// raises the exception.
void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
}
2933
2934void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2935  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2936    GenerateImplicitNullCheck(instruction);
2937  } else {
2938    GenerateExplicitNullCheck(instruction);
2939  }
2940}
2941
2942void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2943  LocationSummary* locations =
2944      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2945  locations->SetInAt(0, Location::RequiresRegister());
2946  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2947  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2948}
2949
// Emits an array element load. For each element type: a constant index is
// folded into the load's immediate offset; a register index is scaled by the
// element size and added to the array base in IP first.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References load as plain words: heap references are 32-bit.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
3076
3077void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3078  Primitive::Type value_type = instruction->GetComponentType();
3079
3080  bool needs_write_barrier =
3081      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3082  bool needs_runtime_call = instruction->NeedsTypeCheck();
3083
3084  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3085      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3086  if (needs_runtime_call) {
3087    InvokeRuntimeCallingConvention calling_convention;
3088    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3089    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3090    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3091  } else {
3092    locations->SetInAt(0, Location::RequiresRegister());
3093    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3094    locations->SetInAt(2, Location::RequiresRegister());
3095
3096    if (needs_write_barrier) {
3097      // Temporary registers for the write barrier.
3098      locations->AddTemp(Location::RequiresRegister());
3099      locations->AddTemp(Location::RequiresRegister());
3100    }
3101  }
3102}
3103
// Generates code for an array element store. Primitive and long/FP stores are
// emitted inline; reference stores that need a type check call the
// pAputObject runtime entrypoint instead. A constant index is folded into the
// addressing offset, otherwise the element address is computed in IP.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit element store.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit element store.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        // Inline 32-bit store; references that reach here need no type check.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        // Record the null check here (rather than after the switch) so it is
        // associated with the store emitted above.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Mark the card for the stored reference.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Type-checked reference store goes through the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store from a core register pair.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Single-precision FP store.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Double-precision FP store from an S-register pair viewed as a D-register.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }

      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
3227
3228void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3229  LocationSummary* locations =
3230      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3231  locations->SetInAt(0, Location::RequiresRegister());
3232  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3233}
3234
3235void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3236  LocationSummary* locations = instruction->GetLocations();
3237  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3238  Register obj = locations->InAt(0).AsRegister<Register>();
3239  Register out = locations->Out().AsRegister<Register>();
3240  __ LoadFromOffset(kLoadWord, out, obj, offset);
3241  codegen_->MaybeRecordImplicitNullCheck(instruction);
3242}
3243
3244void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3245  LocationSummary* locations =
3246      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3247  locations->SetInAt(0, Location::RequiresRegister());
3248  locations->SetInAt(1, Location::RequiresRegister());
3249  if (instruction->HasUses()) {
3250    locations->SetOut(Location::SameAsFirstInput());
3251  }
3252}
3253
3254void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3255  LocationSummary* locations = instruction->GetLocations();
3256  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3257      instruction, locations->InAt(0), locations->InAt(1));
3258  codegen_->AddSlowPath(slow_path);
3259
3260  Register index = locations->InAt(0).AsRegister<Register>();
3261  Register length = locations->InAt(1).AsRegister<Register>();
3262
3263  __ cmp(index, ShifterOperand(length));
3264  __ b(slow_path->GetEntryLabel(), CS);
3265}
3266
// Write barrier: dirties the card covering `object` after a reference `value`
// was stored into it. `temp` and `card` are scratch registers. Storing null
// requires no barrier and is skipped.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card = thread-local card table base.
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp = card index for `object`.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store into card_table_base[object >> kCardShift]. The byte written is the
  // low byte of the card table base register itself, which serves as the
  // dirty marker.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
3275
// Temporaries have no locations of their own; their placement is driven by
// the code generator.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
3279
// No code is emitted for a temporary; see the matching locations builder.
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
3284
// Parallel moves carry no locations to build; reaching this visitor during
// location building is a bug.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
3289
// Delegates emission of the parallel move to the move resolver.
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3293
// A suspend check needs no registers, only a slow-path call kind.
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
3297
3298void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3299  HBasicBlock* block = instruction->GetBlock();
3300  if (block->GetLoopInformation() != nullptr) {
3301    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3302    // The back edge will generate the suspend check.
3303    return;
3304  }
3305  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3306    // The goto will generate the suspend check.
3307    return;
3308  }
3309  GenerateSuspendCheck(instruction, nullptr);
3310}
3311
// Tests the current thread's flags and branches to the suspend-check slow
// path when any flag is set. With a non-null `successor` the fast path jumps
// straight to it; otherwise execution falls through past the check.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // IP = 16-bit thread flags, then compare against zero.
  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Fall-through fast path: slow path returns right after the check.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Branching fast path: slow path resumes at `successor`.
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}
3330
// Returns the assembler of the code generator this resolver emits into.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
3334
// Emits the machine code for the move at `index` of the move list. Handles
// every supported combination of core register, FPU register, register pair,
// stack slot, double stack slot and constant; IP (core) and DTMP (FPU) serve
// as scratch registers for memory-to-memory transfers.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register source -> register or 32-bit stack slot.
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source -> core register, FPU register, or another
    // slot (through IP).
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    // Single-precision FPU source -> FPU register or 32-bit stack slot.
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot source -> another 64-bit slot (through DTMP), a core
    // register pair, or an FPU register pair.
    if (destination.IsDoubleStackSlot()) {
      __ LoadDFromOffset(DTMP, SP, source.GetStackIndex());
      __ StoreDToOffset(DTMP, SP, destination.GetStackIndex());
    } else if (destination.IsRegisterPair()) {
      DCHECK(ExpectedPairLayout(destination));
      __ LoadFromOffset(
          kLoadWordPair, destination.AsRegisterPairLow<Register>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsFpuRegisterPair()) << destination;
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    }
  } else if (source.IsRegisterPair()) {
    // Core register pair source -> another pair or a 64-bit stack slot.
    if (destination.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      DCHECK(ExpectedPairLayout(source));
      __ StoreToOffset(
          kStoreWordPair, source.AsRegisterPairLow<Register>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegisterPair()) {
    // Double-precision FPU pair source -> FPU pair or 64-bit stack slot.
    if (destination.IsFpuRegisterPair()) {
      __ vmovd(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    }
  } else {
    // Constant source: materialize the value in the destination.
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegisterPair()) {
        __ LoadImmediate(destination.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(destination.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the long as two 32-bit words through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else if (constant->IsDoubleConstant()) {
      double value = constant->AsDoubleConstant()->GetValue();
      if (destination.IsFpuRegisterPair()) {
        __ LoadDImmediate(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        // Spill the raw bit pattern as two 32-bit words through IP.
        uint64_t int_value = bit_cast<uint64_t, double>(value);
        __ LoadImmediate(IP, Low32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(int_value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
      }
    } else {
      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}
3449
// Swaps the contents of `reg` with the stack slot at SP + `mem`, using IP as
// scratch.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
3455
// Swaps the contents of the two stack slots at SP + `mem1` and SP + `mem2`,
// using IP plus one additional scratch register. If the scratch register had
// to be spilled, both offsets are shifted by one word to account for the
// push.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
3466
// Emits code that exchanges the two operands of the move at `index`. IP and
// DTMP serve as the temporaries holding one side of the exchange.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core register <-> core register, three moves through IP.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Single-precision FPU register <-> FPU register, through IP.
    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
  } else if (source.IsRegisterPair() && destination.IsRegisterPair()) {
    // Core register pair <-> core register pair, through DTMP.
    __ vmovdrr(DTMP, source.AsRegisterPairLow<Register>(), source.AsRegisterPairHigh<Register>());
    __ Mov(source.AsRegisterPairLow<Register>(), destination.AsRegisterPairLow<Register>());
    __ Mov(source.AsRegisterPairHigh<Register>(), destination.AsRegisterPairHigh<Register>());
    __ vmovrrd(destination.AsRegisterPairLow<Register>(),
               destination.AsRegisterPairHigh<Register>(),
               DTMP);
  } else if (source.IsRegisterPair() || destination.IsRegisterPair()) {
    // Core register pair <-> 64-bit stack slot, through DTMP.
    Register low_reg = source.IsRegisterPair()
        ? source.AsRegisterPairLow<Register>()
        : destination.AsRegisterPairLow<Register>();
    int mem = source.IsRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    DCHECK(ExpectedPairLayout(source.IsRegisterPair() ? source : destination));
    __ vmovdrr(DTMP, low_reg, static_cast<Register>(low_reg + 1));
    __ LoadFromOffset(kLoadWordPair, low_reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegisterPair() && destination.IsFpuRegisterPair()) {
    // Double-precision FPU pair <-> FPU pair, through DTMP.
    DRegister first = FromLowSToD(source.AsFpuRegisterPairLow<SRegister>());
    DRegister second = FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    __ vmovd(DTMP, first);
    __ vmovd(first, second);
    __ vmovd(second, DTMP);
  } else if (source.IsFpuRegisterPair() || destination.IsFpuRegisterPair()) {
    // Double-precision FPU pair <-> 64-bit stack slot, through DTMP.
    DRegister reg = source.IsFpuRegisterPair()
        ? FromLowSToD(source.AsFpuRegisterPairLow<SRegister>())
        : FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>());
    int mem = source.IsFpuRegisterPair()
        ? destination.GetStackIndex()
        : source.GetStackIndex();
    __ vmovd(DTMP, reg);
    __ LoadDFromOffset(reg, SP, mem);
    __ StoreDToOffset(DTMP, SP, mem);
  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
    // Single-precision FPU register <-> 32-bit stack slot, through IP.
    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
                                           : destination.AsFpuRegister<SRegister>();
    int mem = source.IsFpuRegister()
        ? destination.GetStackIndex()
        : source.GetStackIndex();

    __ vmovrs(IP, reg);
    __ LoadSFromOffset(reg, SP, mem);
    __ StoreToOffset(kStoreWord, IP, SP, mem);
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit slot <-> 64-bit slot as two word exchanges.
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
  } else {
    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
  }
}
3539
// Saves a scratch core register on the hardware stack.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
3543
// Restores a scratch core register from the hardware stack.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
3547
3548void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3549  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3550      ? LocationSummary::kCallOnSlowPath
3551      : LocationSummary::kNoCall;
3552  LocationSummary* locations =
3553      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3554  locations->SetOut(Location::RequiresRegister());
3555}
3556
// Loads a class reference into the output register. The referrer's own class
// is read directly from the current method; other classes come from the dex
// cache, with a slow path covering resolution and, when required, class
// initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    // out = current_method->declaring_class_
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // out = current_method->dex_cache_resolved_types_[type_index]
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is not resolved yet.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3583
3584void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3585  LocationSummary* locations =
3586      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3587  locations->SetInAt(0, Location::RequiresRegister());
3588  if (check->HasUses()) {
3589    locations->SetOut(Location::SameAsFirstInput());
3590  }
3591}
3592
3593void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3594  // We assume the class is not null.
3595  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3596      check->GetLoadClass(), check, check->GetDexPc(), true);
3597  codegen_->AddSlowPath(slow_path);
3598  GenerateClassInitializationCheck(slow_path,
3599                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3600}
3601
// Compares the status word of the class in `class_reg` against
// kStatusInitialized and branches to `slow_path` when the class is not yet
// initialized (status < initialized).
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}
3612
3613void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3614  LocationSummary* locations =
3615      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3616  locations->SetOut(Location::RequiresRegister());
3617}
3618
// Loads a string reference from the declaring class's dex cache of strings,
// taking the slow path when the cache entry is still null.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // out = current_method->declaring_class_->dex_cache_strings_[string_index]
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  // A null entry means the string is not resolved yet.
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}
3632
3633void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3634  LocationSummary* locations =
3635      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3636  locations->SetOut(Location::RequiresRegister());
3637}
3638
// Loads the pending exception from the current thread into the output
// register, then clears the thread-local exception slot.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // Clear the slot: thread->exception_ = null.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}
3646
3647void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3648  LocationSummary* locations =
3649      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3650  InvokeRuntimeCallingConvention calling_convention;
3651  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3652}
3653
// Delivers the exception (already in the first runtime argument register)
// through the pDeliverException entrypoint.
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}
3658
3659void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3660  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3661      ? LocationSummary::kNoCall
3662      : LocationSummary::kCallOnSlowPath;
3663  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3664  locations->SetInAt(0, Location::RequiresRegister());
3665  locations->SetInAt(1, Location::RequiresRegister());
3666  // The out register is used as a temporary, so it overlaps with the inputs.
3667  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3668}
3669
// Materializes an instanceof result (0 or 1) in the output register. A null
// object yields 0. For a final class a single class comparison decides the
// result; otherwise a type-check slow path handles the not-equal case.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  // Failure: materialize 0.
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3708
3709void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3710  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3711      instruction, LocationSummary::kCallOnSlowPath);
3712  locations->SetInAt(0, Location::RequiresRegister());
3713  locations->SetInAt(1, Location::RequiresRegister());
3714  locations->AddTemp(Location::RequiresRegister());
3715}
3716
// Emits a checked cast: a null object passes immediately, and an object
// whose class is not equal to `cls` is sent to the type-check slow path for
// the full check.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // Null objects always pass the cast.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}
3737
3738void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3739  LocationSummary* locations =
3740      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3741  InvokeRuntimeCallingConvention calling_convention;
3742  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3743}
3744
3745void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3746  codegen_->InvokeRuntime(instruction->IsEnter()
3747        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3748      instruction,
3749      instruction->GetDexPc());
3750}
3751
// All bitwise operations share a single location-building routine.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3755
3756void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3757  LocationSummary* locations =
3758      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3759  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3760         || instruction->GetResultType() == Primitive::kPrimLong);
3761  locations->SetInAt(0, Location::RequiresRegister());
3762  locations->SetInAt(1, Location::RequiresRegister());
3763  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3764}
3765
// All bitwise operations share a single code-generation routine.
void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3777
// Emits code for And/Or/Xor. A 32-bit operation is a single ALU instruction;
// a 64-bit operation applies the instruction to the low and high words of
// the register pairs separately.
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: one instruction.
    Register first = locations->InAt(0).AsRegister<Register>();
    Register second = locations->InAt(1).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    // 64-bit case: operate on low and high words independently.
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}
3823
3824void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
3825  DCHECK_EQ(temp, kArtMethodRegister);
3826
3827  // TODO: Implement all kinds of calls:
3828  // 1) boot -> boot
3829  // 2) app -> boot
3830  // 3) app -> app
3831  //
3832  // Currently we implement the app -> app logic, which looks up in the resolve cache.
3833
3834  // temp = method;
3835  LoadCurrentMethod(temp);
3836  if (!invoke->IsRecursive()) {
3837    // temp = temp->dex_cache_resolved_methods_;
3838    __ LoadFromOffset(
3839        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
3840    // temp = temp[index_in_cache]
3841    __ LoadFromOffset(
3842        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
3843    // LR = temp[offset_of_quick_compiled_code]
3844    __ LoadFromOffset(kLoadWord, LR, temp,
3845                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
3846                          kArmWordSize).Int32Value());
3847    // LR()
3848    __ blx(LR);
3849  } else {
3850    __ bl(GetFrameEntryLabel());
3851  }
3852
3853  RecordPcInfo(invoke, invoke->GetDexPc());
3854  DCHECK(!IsLeafMethod());
3855}
3856
3857}  // namespace arm
3858}  // namespace art
3859