code_generator_arm.cc revision 988939683c26c0b1c8808fc206add6337319509a
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
20#include "entrypoints/quick/quick_entrypoints.h"
21#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
25#include "thread.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31namespace art {
32
33namespace arm {
34
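// Each ARM D register overlays two consecutive S registers, so an even S register S(2n)
// maps to D(n), e.g. S4 -> D2.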
35static DRegister FromLowSToD(SRegister reg) {
36  DCHECK_EQ(reg % 2, 0);
37  return static_cast<DRegister>(reg / 2);
38}
39
40static bool ExpectedPairLayout(Location location) {
41  // We expect this for both core and fpu register pairs.
42  return ((location.low() & 1) == 0) && (location.low() + 1 == location.high());
43}
44
45static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
46static constexpr int kCurrentMethodStackOffset = 0;
47
48static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
49static constexpr size_t kRuntimeParameterCoreRegistersLength =
50    arraysize(kRuntimeParameterCoreRegisters);
51static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
52static constexpr size_t kRuntimeParameterFpuRegistersLength =
53    arraysize(kRuntimeParameterFpuRegisters);
54
55class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
56 public:
57  InvokeRuntimeCallingConvention()
58      : CallingConvention(kRuntimeParameterCoreRegisters,
59                          kRuntimeParameterCoreRegistersLength,
60                          kRuntimeParameterFpuRegisters,
61                          kRuntimeParameterFpuRegistersLength) {}
62
63 private:
64  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
65};
66
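// Within the slow paths below, `__` emits code through the ArmAssembler of the code generator
// passed to EmitNativeCode, and QUICK_ENTRY_POINT(x) expands to the byte offset of the quick
// runtime entry point `x` within the Thread object (for kArmWordSize-sized pointers).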
67#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
68#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
69
70class SlowPathCodeARM : public SlowPathCode {
71 public:
72  SlowPathCodeARM() : entry_label_(), exit_label_() {}
73
74  Label* GetEntryLabel() { return &entry_label_; }
75  Label* GetExitLabel() { return &exit_label_; }
76
77 private:
78  Label entry_label_;
79  Label exit_label_;
80
81  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
82};
83
84class NullCheckSlowPathARM : public SlowPathCodeARM {
85 public:
86  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
87
88  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
89    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
90    __ Bind(GetEntryLabel());
91    arm_codegen->InvokeRuntime(
92        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
93  }
94
95 private:
96  HNullCheck* const instruction_;
97  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
98};
99
100class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
101 public:
102  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
103
104  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
105    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
106    __ Bind(GetEntryLabel());
107    arm_codegen->InvokeRuntime(
108        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
109  }
110
111 private:
112  HDivZeroCheck* const instruction_;
113  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
114};
115
116class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
117 public:
118  StackOverflowCheckSlowPathARM() {}
119
120  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
121    __ Bind(GetEntryLabel());
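    // Load the pThrowStackOverflow entry point directly into PC: this tail-calls the runtime,
    // and the throw never returns here.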
122    __ LoadFromOffset(kLoadWord, PC, TR,
123        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
124  }
125
126 private:
127  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
128};
129
130class SuspendCheckSlowPathARM : public SlowPathCodeARM {
131 public:
132  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
133      : instruction_(instruction), successor_(successor) {}
134
135  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
136    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
137    __ Bind(GetEntryLabel());
138    codegen->SaveLiveRegisters(instruction_->GetLocations());
139    arm_codegen->InvokeRuntime(
140        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
141    codegen->RestoreLiveRegisters(instruction_->GetLocations());
142    if (successor_ == nullptr) {
143      __ b(GetReturnLabel());
144    } else {
145      __ b(arm_codegen->GetLabelOf(successor_));
146    }
147  }
148
149  Label* GetReturnLabel() {
150    DCHECK(successor_ == nullptr);
151    return &return_label_;
152  }
153
154 private:
155  HSuspendCheck* const instruction_;
156  // If not null, the block to branch to after the suspend check.
157  HBasicBlock* const successor_;
158
159  // If `successor_` is null, the label to branch to after the suspend check.
160  Label return_label_;
161
162  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
163};
164
165class BoundsCheckSlowPathARM : public SlowPathCodeARM {
166 public:
167  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
168                         Location index_location,
169                         Location length_location)
170      : instruction_(instruction),
171        index_location_(index_location),
172        length_location_(length_location) {}
173
174  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
175    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
176    __ Bind(GetEntryLabel());
177    // We're moving two locations to locations that could overlap, so we need a parallel
178    // move resolver.
179    InvokeRuntimeCallingConvention calling_convention;
180    codegen->EmitParallelMoves(
181        index_location_,
182        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
183        length_location_,
184        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
185    arm_codegen->InvokeRuntime(
186        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
187  }
188
189 private:
190  HBoundsCheck* const instruction_;
191  const Location index_location_;
192  const Location length_location_;
193
194  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
195};
196
197class LoadClassSlowPathARM : public SlowPathCodeARM {
198 public:
199  LoadClassSlowPathARM(HLoadClass* cls,
200                       HInstruction* at,
201                       uint32_t dex_pc,
202                       bool do_clinit)
203      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
204    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
205  }
206
207  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
208    LocationSummary* locations = at_->GetLocations();
209
210    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
211    __ Bind(GetEntryLabel());
212    codegen->SaveLiveRegisters(locations);
213
214    InvokeRuntimeCallingConvention calling_convention;
215    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
216    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
217    int32_t entry_point_offset = do_clinit_
218        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
219        : QUICK_ENTRY_POINT(pInitializeType);
220    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
221
222    // Move the class to the desired location.
223    Location out = locations->Out();
224    if (out.IsValid()) {
225      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
226      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
227    }
228    codegen->RestoreLiveRegisters(locations);
229    __ b(GetExitLabel());
230  }
231
232 private:
233  // The class this slow path will load.
234  HLoadClass* const cls_;
235
236  // The instruction where this slow path is happening.
237  // (Might be the load class or an initialization check).
238  HInstruction* const at_;
239
240  // The dex PC of `at_`.
241  const uint32_t dex_pc_;
242
243  // Whether to initialize the class.
244  const bool do_clinit_;
245
246  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
247};
248
249class LoadStringSlowPathARM : public SlowPathCodeARM {
250 public:
251  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
252
253  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
254    LocationSummary* locations = instruction_->GetLocations();
255    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
256
257    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
258    __ Bind(GetEntryLabel());
259    codegen->SaveLiveRegisters(locations);
260
261    InvokeRuntimeCallingConvention calling_convention;
262    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
263    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
264    arm_codegen->InvokeRuntime(
265        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
266    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
267
268    codegen->RestoreLiveRegisters(locations);
269    __ b(GetExitLabel());
270  }
271
272 private:
273  HLoadString* const instruction_;
274
275  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
276};
277
278class TypeCheckSlowPathARM : public SlowPathCodeARM {
279 public:
280  TypeCheckSlowPathARM(HInstruction* instruction,
281                       Location class_to_check,
282                       Location object_class,
283                       uint32_t dex_pc)
284      : instruction_(instruction),
285        class_to_check_(class_to_check),
286        object_class_(object_class),
287        dex_pc_(dex_pc) {}
288
289  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
290    LocationSummary* locations = instruction_->GetLocations();
291    DCHECK(instruction_->IsCheckCast()
292           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
293
294    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
295    __ Bind(GetEntryLabel());
296    codegen->SaveLiveRegisters(locations);
297
298    // We're moving two locations to locations that could overlap, so we need a parallel
299    // move resolver.
300    InvokeRuntimeCallingConvention calling_convention;
301    codegen->EmitParallelMoves(
302        class_to_check_,
303        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
304        object_class_,
305        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
306
307    if (instruction_->IsInstanceOf()) {
308      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
309      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
310    } else {
311      DCHECK(instruction_->IsCheckCast());
312      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
313    }
314
315    codegen->RestoreLiveRegisters(locations);
316    __ b(GetExitLabel());
317  }
318
319 private:
320  HInstruction* const instruction_;
321  const Location class_to_check_;
322  const Location object_class_;
323  uint32_t dex_pc_;
324
325  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
326};
327
328#undef __
331#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
332
333inline Condition ARMCondition(IfCondition cond) {
334  switch (cond) {
335    case kCondEQ: return EQ;
336    case kCondNE: return NE;
337    case kCondLT: return LT;
338    case kCondLE: return LE;
339    case kCondGT: return GT;
340    case kCondGE: return GE;
341    default:
342      LOG(FATAL) << "Unknown if condition";
343  }
344  return EQ;        // Unreachable.
345}
346
347inline Condition ARMOppositeCondition(IfCondition cond) {
348  switch (cond) {
349    case kCondEQ: return NE;
350    case kCondNE: return EQ;
351    case kCondLT: return GE;
352    case kCondLE: return GT;
353    case kCondGT: return LE;
354    case kCondGE: return LT;
355    default:
356      LOG(FATAL) << "Unknown if condition";
357  }
358  return EQ;        // Unreachable.
359}
360
361void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
362  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
363}
364
365void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
366  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
367}
368
369size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
370  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
371  return kArmWordSize;
372}
373
374size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
375  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
376  return kArmWordSize;
377}
378
379size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
380  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
381  return kArmWordSize;
382}
383
384size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
385  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
386  return kArmWordSize;
387}
388
389CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
390                                   const ArmInstructionSetFeatures& isa_features,
391                                   const CompilerOptions& compiler_options)
392    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters,
393                    kNumberOfRegisterPairs, 0, 0, compiler_options),
394      block_labels_(graph->GetArena(), 0),
395      location_builder_(graph, this),
396      instruction_visitor_(graph, this),
397      move_resolver_(graph->GetArena(), this),
398      assembler_(true),
399      isa_features_(isa_features) {}
400
401size_t CodeGeneratorARM::FrameEntrySpillSize() const {
402  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
403}
404
405Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
406  switch (type) {
407    case Primitive::kPrimLong: {
408      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
409      ArmManagedRegister pair =
410          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
411      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
412      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
413
414      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
415      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
416      UpdateBlockedPairRegisters();
417      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
418    }
419
420    case Primitive::kPrimByte:
421    case Primitive::kPrimBoolean:
422    case Primitive::kPrimChar:
423    case Primitive::kPrimShort:
424    case Primitive::kPrimInt:
425    case Primitive::kPrimNot: {
426      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
427      // Block all register pairs that contain `reg`.
428      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
429        ArmManagedRegister current =
430            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
431        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
432          blocked_register_pairs_[i] = true;
433        }
434      }
435      return Location::RegisterLocation(reg);
436    }
437
438    case Primitive::kPrimFloat: {
439      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
440      return Location::FpuRegisterLocation(reg);
441    }
442
443    case Primitive::kPrimDouble: {
444      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
445      DCHECK_EQ(reg % 2, 0);
446      return Location::FpuRegisterPairLocation(reg, reg + 1);
447    }
448
449    case Primitive::kPrimVoid:
450      LOG(FATAL) << "Unreachable type " << type;
451  }
452
453  return Location();
454}
455
456void CodeGeneratorARM::SetupBlockedRegisters(bool is_baseline ATTRIBUTE_UNUSED) const {
457  // Don't allocate the register pair used for Dalvik-style argument passing.
458  blocked_register_pairs_[R1_R2] = true;
459
460  // Stack register, LR and PC are always reserved.
461  blocked_core_registers_[SP] = true;
462  blocked_core_registers_[LR] = true;
463  blocked_core_registers_[PC] = true;
464
465  // Reserve thread register.
466  blocked_core_registers_[TR] = true;
467
468  // Reserve temp register.
469  blocked_core_registers_[IP] = true;
470
471  // TODO: We currently don't use Quick's callee saved registers.
472  // We always save and restore R6 and R7 to make sure we can use three
473  // register pairs for long operations.
474  blocked_core_registers_[R4] = true;
475  blocked_core_registers_[R5] = true;
476  blocked_core_registers_[R8] = true;
477  blocked_core_registers_[R10] = true;
478  blocked_core_registers_[R11] = true;
479
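  // Likewise block the VFP callee-saved registers (S16-S31), which this generator does not
  // save and restore yet.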
480  blocked_fpu_registers_[S16] = true;
481  blocked_fpu_registers_[S17] = true;
482  blocked_fpu_registers_[S18] = true;
483  blocked_fpu_registers_[S19] = true;
484  blocked_fpu_registers_[S20] = true;
485  blocked_fpu_registers_[S21] = true;
486  blocked_fpu_registers_[S22] = true;
487  blocked_fpu_registers_[S23] = true;
488  blocked_fpu_registers_[S24] = true;
489  blocked_fpu_registers_[S25] = true;
490  blocked_fpu_registers_[S26] = true;
491  blocked_fpu_registers_[S27] = true;
492  blocked_fpu_registers_[S28] = true;
493  blocked_fpu_registers_[S29] = true;
494  blocked_fpu_registers_[S30] = true;
495  blocked_fpu_registers_[S31] = true;
496
497  UpdateBlockedPairRegisters();
498}
499
500void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
501  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
502    ArmManagedRegister current =
503        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
504    if (blocked_core_registers_[current.AsRegisterPairLow()]
505        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
506      blocked_register_pairs_[i] = true;
507    }
508  }
509}
510
511InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
512      : HGraphVisitor(graph),
513        assembler_(codegen->GetAssembler()),
514        codegen_(codegen) {}
515
516void CodeGeneratorARM::GenerateFrameEntry() {
517  bool skip_overflow_check =
518      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
519  if (!skip_overflow_check) {
520    if (GetCompilerOptions().GetImplicitStackOverflowChecks()) {
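      // Implicit check: probe the address GetStackOverflowReservedBytes(kArm) below SP; if the
      // stack is exhausted the load faults and the fault handler raises a StackOverflowError.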
521      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
522      __ LoadFromOffset(kLoadWord, IP, IP, 0);
523      RecordPcInfo(nullptr, 0);
524    } else {
525      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
526      AddSlowPath(slow_path);
527
528      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
529      __ cmp(SP, ShifterOperand(IP));
530      __ b(slow_path->GetEntryLabel(), CC);
531    }
532  }
533
534  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
535  __ PushList(1 << LR | 1 << R6 | 1 << R7);
536
537  // The return PC has already been pushed on the stack.
538  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
539  __ StoreToOffset(kStoreWord, R0, SP, 0);
540}
541
542void CodeGeneratorARM::GenerateFrameExit() {
543  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
544  __ PopList(1 << PC | 1 << R6 | 1 << R7);
545}
546
547void CodeGeneratorARM::Bind(HBasicBlock* block) {
548  __ Bind(GetLabelOf(block));
549}
550
551Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
552  switch (load->GetType()) {
553    case Primitive::kPrimLong:
554    case Primitive::kPrimDouble:
555      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
556      break;
557
558    case Primitive::kPrimInt:
559    case Primitive::kPrimNot:
560    case Primitive::kPrimFloat:
561      return Location::StackSlot(GetStackSlot(load->GetLocal()));
562
563    case Primitive::kPrimBoolean:
564    case Primitive::kPrimByte:
565    case Primitive::kPrimChar:
566    case Primitive::kPrimShort:
567    case Primitive::kPrimVoid:
568      LOG(FATAL) << "Unexpected type " << load->GetType();
569  }
570
571  LOG(FATAL) << "Unreachable";
572  return Location();
573}
574
575Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
576  switch (type) {
577    case Primitive::kPrimBoolean:
578    case Primitive::kPrimByte:
579    case Primitive::kPrimChar:
580    case Primitive::kPrimShort:
581    case Primitive::kPrimInt:
582    case Primitive::kPrimNot: {
583      uint32_t index = gp_index_++;
584      uint32_t stack_index = stack_index_++;
585      if (index < calling_convention.GetNumberOfRegisters()) {
586        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
587      } else {
588        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
589      }
590    }
591
592    case Primitive::kPrimLong: {
593      uint32_t index = gp_index_;
594      uint32_t stack_index = stack_index_;
595      gp_index_ += 2;
596      stack_index_ += 2;
597      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
598        if (calling_convention.GetRegisterAt(index) == R1) {
599          // Skip R1, and use R2_R3 instead.
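          // (A pair starting at R1 would not satisfy ExpectedPairLayout, which requires an even
          // low register, so the long goes into R2_R3 and R1 is simply left unused.)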
600          gp_index_++;
601          index++;
602        }
603      }
604      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
605        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
606                  calling_convention.GetRegisterAt(index + 1));
607        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
608                                              calling_convention.GetRegisterAt(index + 1));
609      } else {
610        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
611      }
612    }
613
614    case Primitive::kPrimFloat: {
615      uint32_t stack_index = stack_index_++;
616      if (float_index_ % 2 == 0) {
617        float_index_ = std::max(double_index_, float_index_);
618      }
619      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
620        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
621      } else {
622        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
623      }
624    }
625
626    case Primitive::kPrimDouble: {
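      // A double must start at an even S register so that its two halves overlay a single
      // D register; round the running float index up before allocating.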
627      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
628      uint32_t stack_index = stack_index_;
629      stack_index_ += 2;
630      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
631        uint32_t index = double_index_;
632        double_index_ += 2;
633        Location result = Location::FpuRegisterPairLocation(
634          calling_convention.GetFpuRegisterAt(index),
635          calling_convention.GetFpuRegisterAt(index + 1));
636        DCHECK(ExpectedPairLayout(result));
637        return result;
638      } else {
639        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
640      }
641    }
642
643    case Primitive::kPrimVoid:
644      LOG(FATAL) << "Unexpected parameter type " << type;
645      break;
646  }
647  return Location();
648}
649
650Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
651  switch (type) {
652    case Primitive::kPrimBoolean:
653    case Primitive::kPrimByte:
654    case Primitive::kPrimChar:
655    case Primitive::kPrimShort:
656    case Primitive::kPrimInt:
657    case Primitive::kPrimNot: {
658      return Location::RegisterLocation(R0);
659    }
660
661    case Primitive::kPrimFloat: {
662      return Location::FpuRegisterLocation(S0);
663    }
664
665    case Primitive::kPrimLong: {
666      return Location::RegisterPairLocation(R0, R1);
667    }
668
669    case Primitive::kPrimDouble: {
670      return Location::FpuRegisterPairLocation(S0, S1);
671    }
672
673    case Primitive::kPrimVoid:
674      return Location();
675  }
676  UNREACHABLE();
677  return Location();
678}
679
680void CodeGeneratorARM::Move32(Location destination, Location source) {
681  if (source.Equals(destination)) {
682    return;
683  }
684  if (destination.IsRegister()) {
685    if (source.IsRegister()) {
686      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
687    } else if (source.IsFpuRegister()) {
688      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
689    } else {
690      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
691    }
692  } else if (destination.IsFpuRegister()) {
693    if (source.IsRegister()) {
694      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
695    } else if (source.IsFpuRegister()) {
696      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
697    } else {
698      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
699    }
700  } else {
701    DCHECK(destination.IsStackSlot()) << destination;
702    if (source.IsRegister()) {
703      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
704    } else if (source.IsFpuRegister()) {
705      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
706    } else {
707      DCHECK(source.IsStackSlot()) << source;
708      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
709      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
710    }
711  }
712}
713
714void CodeGeneratorARM::Move64(Location destination, Location source) {
715  if (source.Equals(destination)) {
716    return;
717  }
718  if (destination.IsRegisterPair()) {
719    if (source.IsRegisterPair()) {
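      // The source and destination pairs may share registers, so let the parallel move
      // resolver order the two word-sized moves safely.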
720      EmitParallelMoves(
721          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
722          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
723          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
724          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
725    } else if (source.IsFpuRegister()) {
726      UNIMPLEMENTED(FATAL);
727    } else {
728      DCHECK(source.IsDoubleStackSlot());
729      DCHECK(ExpectedPairLayout(destination));
730      __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
731                        SP, source.GetStackIndex());
732    }
733  } else if (destination.IsFpuRegisterPair()) {
734    if (source.IsDoubleStackSlot()) {
735      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
736                         SP,
737                         source.GetStackIndex());
738    } else {
739      UNIMPLEMENTED(FATAL);
740    }
741  } else {
742    DCHECK(destination.IsDoubleStackSlot());
743    if (source.IsRegisterPair()) {
744      // No conflict possible, so just do the moves.
745      if (source.AsRegisterPairLow<Register>() == R1) {
746        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
747        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
748        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
749      } else {
750        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
751                         SP, destination.GetStackIndex());
752      }
753    } else if (source.IsFpuRegisterPair()) {
754      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
755                        SP,
756                        destination.GetStackIndex());
757    } else {
758      DCHECK(source.IsDoubleStackSlot());
759      EmitParallelMoves(
760          Location::StackSlot(source.GetStackIndex()),
761          Location::StackSlot(destination.GetStackIndex()),
762          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
763          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
764    }
765  }
766}
767
768void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
769  LocationSummary* locations = instruction->GetLocations();
770  if (locations != nullptr && locations->Out().Equals(location)) {
771    return;
772  }
773
774  if (locations != nullptr && locations->Out().IsConstant()) {
775    HConstant* const_to_move = locations->Out().GetConstant();
776    if (const_to_move->IsIntConstant()) {
777      int32_t value = const_to_move->AsIntConstant()->GetValue();
778      if (location.IsRegister()) {
779        __ LoadImmediate(location.AsRegister<Register>(), value);
780      } else {
781        DCHECK(location.IsStackSlot());
782        __ LoadImmediate(IP, value);
783        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
784      }
785    } else {
786      DCHECK(const_to_move->IsLongConstant()) << const_to_move->DebugName();
787      int64_t value = const_to_move->AsLongConstant()->GetValue();
788      if (location.IsRegisterPair()) {
789        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
790        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
791      } else {
792        DCHECK(location.IsDoubleStackSlot());
793        __ LoadImmediate(IP, Low32Bits(value));
794        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
795        __ LoadImmediate(IP, High32Bits(value));
796        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
797      }
798    }
799  } else if (instruction->IsLoadLocal()) {
800    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
801    switch (instruction->GetType()) {
802      case Primitive::kPrimBoolean:
803      case Primitive::kPrimByte:
804      case Primitive::kPrimChar:
805      case Primitive::kPrimShort:
806      case Primitive::kPrimInt:
807      case Primitive::kPrimNot:
808      case Primitive::kPrimFloat:
809        Move32(location, Location::StackSlot(stack_slot));
810        break;
811
812      case Primitive::kPrimLong:
813      case Primitive::kPrimDouble:
814        Move64(location, Location::DoubleStackSlot(stack_slot));
815        break;
816
817      default:
818        LOG(FATAL) << "Unexpected type " << instruction->GetType();
819    }
820  } else if (instruction->IsTemporary()) {
821    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
822    if (temp_location.IsStackSlot()) {
823      Move32(location, temp_location);
824    } else {
825      DCHECK(temp_location.IsDoubleStackSlot());
826      Move64(location, temp_location);
827    }
828  } else {
829    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
830    switch (instruction->GetType()) {
831      case Primitive::kPrimBoolean:
832      case Primitive::kPrimByte:
833      case Primitive::kPrimChar:
834      case Primitive::kPrimShort:
835      case Primitive::kPrimNot:
836      case Primitive::kPrimInt:
837      case Primitive::kPrimFloat:
838        Move32(location, locations->Out());
839        break;
840
841      case Primitive::kPrimLong:
842      case Primitive::kPrimDouble:
843        Move64(location, locations->Out());
844        break;
845
846      default:
847        LOG(FATAL) << "Unexpected type " << instruction->GetType();
848    }
849  }
850}
851
852void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
853                                     HInstruction* instruction,
854                                     uint32_t dex_pc) {
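  // Quick entry points live at fixed offsets off the thread register (TR); load the target
  // into LR, call it, and record the PC so this call site gets a stack map entry.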
855  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
856  __ blx(LR);
857  RecordPcInfo(instruction, dex_pc);
858  DCHECK(instruction->IsSuspendCheck()
859      || instruction->IsBoundsCheck()
860      || instruction->IsNullCheck()
861      || instruction->IsDivZeroCheck()
862      || instruction->GetLocations()->CanCall()
863      || !IsLeafMethod());
864}
865
866void LocationsBuilderARM::VisitGoto(HGoto* got) {
867  got->SetLocations(nullptr);
868}
869
870void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
871  HBasicBlock* successor = got->GetSuccessor();
872  DCHECK(!successor->IsExitBlock());
873
874  HBasicBlock* block = got->GetBlock();
875  HInstruction* previous = got->GetPrevious();
876
877  HLoopInformation* info = block->GetLoopInformation();
878  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
879    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
880    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
881    return;
882  }
883
884  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
885    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
886  }
887  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
888    __ b(codegen_->GetLabelOf(successor));
889  }
890}
891
892void LocationsBuilderARM::VisitExit(HExit* exit) {
893  exit->SetLocations(nullptr);
894}
895
896void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
897  UNUSED(exit);
898  if (kIsDebugBuild) {
899    __ Comment("Unreachable");
900    __ bkpt(0);
901  }
902}
903
904void LocationsBuilderARM::VisitIf(HIf* if_instr) {
905  LocationSummary* locations =
906      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
907  HInstruction* cond = if_instr->InputAt(0);
908  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
909    locations->SetInAt(0, Location::RequiresRegister());
910  }
911}
912
913void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
914  HInstruction* cond = if_instr->InputAt(0);
915  if (cond->IsIntConstant()) {
916    // Constant condition, statically compared against 1.
917    int32_t cond_value = cond->AsIntConstant()->GetValue();
918    if (cond_value == 1) {
919      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
920                                     if_instr->IfTrueSuccessor())) {
921        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
922      }
923      return;
924    } else {
925      DCHECK_EQ(cond_value, 0);
926    }
927  } else {
928    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
929      // The condition has been materialized; compare its output to 0.
930      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
931      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
932             ShifterOperand(0));
933      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
934    } else {
935      // The condition has not been materialized; use its inputs for the
936      // comparison and its condition code as the branch condition.
937      LocationSummary* locations = cond->GetLocations();
938      DCHECK(locations->InAt(0).IsRegister()) << locations->InAt(0);
939      Register left = locations->InAt(0).AsRegister<Register>();
940      if (locations->InAt(1).IsRegister()) {
941        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
942      } else {
943        DCHECK(locations->InAt(1).IsConstant());
944        int32_t value =
945            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
946        ShifterOperand operand;
947        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
948          __ cmp(left, operand);
949        } else {
950          Register temp = IP;
951          __ LoadImmediate(temp, value);
952          __ cmp(left, ShifterOperand(temp));
953        }
954      }
955      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
956           ARMCondition(cond->AsCondition()->GetCondition()));
957    }
958  }
959  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
960                                 if_instr->IfFalseSuccessor())) {
961    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
962  }
963}
964
965
966void LocationsBuilderARM::VisitCondition(HCondition* comp) {
967  LocationSummary* locations =
968      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
969  locations->SetInAt(0, Location::RequiresRegister());
970  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
971  if (comp->NeedsMaterialization()) {
972    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
973  }
974}
975
976void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
977  if (!comp->NeedsMaterialization()) return;
978  LocationSummary* locations = comp->GetLocations();
979  Register left = locations->InAt(0).AsRegister<Register>();
980
981  if (locations->InAt(1).IsRegister()) {
982    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
983  } else {
984    DCHECK(locations->InAt(1).IsConstant());
985    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
986    ShifterOperand operand;
987    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
988      __ cmp(left, operand);
989    } else {
990      Register temp = IP;
991      __ LoadImmediate(temp, value);
992      __ cmp(left, ShifterOperand(temp));
993    }
994  }
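  // Materialize the boolean result: the IT (if-then-else) block writes 1 if the condition
  // holds and 0 otherwise.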
995  __ it(ARMCondition(comp->GetCondition()), kItElse);
996  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
997         ARMCondition(comp->GetCondition()));
998  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
999         ARMOppositeCondition(comp->GetCondition()));
1000}
1001
1002void LocationsBuilderARM::VisitEqual(HEqual* comp) {
1003  VisitCondition(comp);
1004}
1005
1006void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
1007  VisitCondition(comp);
1008}
1009
1010void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
1011  VisitCondition(comp);
1012}
1013
1014void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
1015  VisitCondition(comp);
1016}
1017
1018void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
1019  VisitCondition(comp);
1020}
1021
1022void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
1023  VisitCondition(comp);
1024}
1025
1026void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1027  VisitCondition(comp);
1028}
1029
1030void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1031  VisitCondition(comp);
1032}
1033
1034void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
1035  VisitCondition(comp);
1036}
1037
1038void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
1039  VisitCondition(comp);
1040}
1041
1042void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1043  VisitCondition(comp);
1044}
1045
1046void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1047  VisitCondition(comp);
1048}
1049
1050void LocationsBuilderARM::VisitLocal(HLocal* local) {
1051  local->SetLocations(nullptr);
1052}
1053
1054void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
1055  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1056}
1057
1058void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1059  load->SetLocations(nullptr);
1060}
1061
1062void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1063  // Nothing to do, this is driven by the code generator.
1064  UNUSED(load);
1065}
1066
1067void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1068  LocationSummary* locations =
1069      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1070  switch (store->InputAt(1)->GetType()) {
1071    case Primitive::kPrimBoolean:
1072    case Primitive::kPrimByte:
1073    case Primitive::kPrimChar:
1074    case Primitive::kPrimShort:
1075    case Primitive::kPrimInt:
1076    case Primitive::kPrimNot:
1077    case Primitive::kPrimFloat:
1078      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1079      break;
1080
1081    case Primitive::kPrimLong:
1082    case Primitive::kPrimDouble:
1083      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1084      break;
1085
1086    default:
1087      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1088  }
1089}
1090
1091void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1092  UNUSED(store);
1093}
1094
1095void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1096  LocationSummary* locations =
1097      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1098  locations->SetOut(Location::ConstantLocation(constant));
1099}
1100
1101void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1102  // Will be generated at use site.
1103  UNUSED(constant);
1104}
1105
1106void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1107  LocationSummary* locations =
1108      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1109  locations->SetOut(Location::ConstantLocation(constant));
1110}
1111
1112void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1113  // Will be generated at use site.
1114  UNUSED(constant);
1115}
1116
1117void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1118  LocationSummary* locations =
1119      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1120  locations->SetOut(Location::ConstantLocation(constant));
1121}
1122
1123void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1124  // Will be generated at use site.
1125  UNUSED(constant);
1126}
1127
1128void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1129  LocationSummary* locations =
1130      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1131  locations->SetOut(Location::ConstantLocation(constant));
1132}
1133
1134void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1135  // Will be generated at use site.
1136  UNUSED(constant);
1137}
1138
1139void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1140  ret->SetLocations(nullptr);
1141}
1142
1143void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1144  UNUSED(ret);
1145  codegen_->GenerateFrameExit();
1146}
1147
1148void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1149  LocationSummary* locations =
1150      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1151  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1152}
1153
1154void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1155  UNUSED(ret);
1156  codegen_->GenerateFrameExit();
1157}
1158
1159void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1160  HandleInvoke(invoke);
1161}
1162
1163void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1164  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1165}
1166
1167void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
1168  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1169
1170  // TODO: Implement all kinds of calls:
1171  // 1) boot -> boot
1172  // 2) app -> boot
1173  // 3) app -> app
1174  //
1175  // Currently we implement the app -> app logic, which looks the callee up in the dex cache of resolved methods.
1176
1177  // temp = method;
1178  codegen_->LoadCurrentMethod(temp);
1179  // temp = temp->dex_cache_resolved_methods_;
1180  __ LoadFromOffset(
1181      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1182  // temp = temp[index_in_cache]
1183  __ LoadFromOffset(
1184      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
1185  // LR = temp[offset_of_quick_compiled_code]
1186  __ LoadFromOffset(kLoadWord, LR, temp,
1187                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1188                         kArmWordSize).Int32Value());
1189  // LR();
1190  __ blx(LR);
1191
1192  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1193  DCHECK(!codegen_->IsLeafMethod());
1194}
1195
1196void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1197  LocationSummary* locations =
1198      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1199  locations->AddTemp(Location::RegisterLocation(R0));
1200
1201  InvokeDexCallingConventionVisitor calling_convention_visitor;
1202  for (size_t i = 0; i < invoke->InputCount(); i++) {
1203    HInstruction* input = invoke->InputAt(i);
1204    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1205  }
1206
1207  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1208}
1209
1210void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1211  HandleInvoke(invoke);
1212}
1213
1214void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1215  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1216  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1217          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1218  LocationSummary* locations = invoke->GetLocations();
1219  Location receiver = locations->InAt(0);
1220  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1221  // temp = object->GetClass();
1222  if (receiver.IsStackSlot()) {
1223    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1224    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1225  } else {
1226    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1227  }
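  // When implicit null checks are enabled, the class load above serves as the null check on
  // the receiver, so record the PC for it here.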
1228  codegen_->MaybeRecordImplicitNullCheck(invoke);
1229  // temp = temp->GetMethodAt(method_offset);
1230  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1231      kArmWordSize).Int32Value();
1232  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1233  // LR = temp->GetEntryPoint();
1234  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1235  // LR();
1236  __ blx(LR);
1237  DCHECK(!codegen_->IsLeafMethod());
1238  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1239}
1240
1241void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1242  HandleInvoke(invoke);
1243  // Add the hidden argument.
1244  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
1245}
1246
1247void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
1248  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1249  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1250  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1251          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1252  LocationSummary* locations = invoke->GetLocations();
1253  Location receiver = locations->InAt(0);
1254  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1255
1256  // Set the hidden argument.
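  // R12 receives the interface method's dex method index; the runtime's IMT conflict handling
  // is expected to use it to locate the actual target.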
1257  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
1258                   invoke->GetDexMethodIndex());
1259
1260  // temp = object->GetClass();
1261  if (receiver.IsStackSlot()) {
1262    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1263    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1264  } else {
1265    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
1266  }
1267  codegen_->MaybeRecordImplicitNullCheck(invoke);
1268  // temp = temp->GetImtEntryAt(method_offset);
1269  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1270      kArmWordSize).Int32Value();
1271  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1272  // LR = temp->GetEntryPoint();
1273  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1274  // LR();
1275  __ blx(LR);
1276  DCHECK(!codegen_->IsLeafMethod());
1277  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1278}
1279
1280void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1281  LocationSummary* locations =
1282      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1283  switch (neg->GetResultType()) {
1284    case Primitive::kPrimInt:
1285    case Primitive::kPrimLong: {
1286      Location::OutputOverlap output_overlaps = (neg->GetResultType() == Primitive::kPrimLong)
1287          ? Location::kOutputOverlap
1288          : Location::kNoOutputOverlap;
1289      locations->SetInAt(0, Location::RequiresRegister());
1290      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1291      break;
1292    }
1293
1294    case Primitive::kPrimFloat:
1295    case Primitive::kPrimDouble:
1296      locations->SetInAt(0, Location::RequiresFpuRegister());
1297      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1298      break;
1299
1300    default:
1301      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1302  }
1303}
1304
1305void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1306  LocationSummary* locations = neg->GetLocations();
1307  Location out = locations->Out();
1308  Location in = locations->InAt(0);
1309  switch (neg->GetResultType()) {
1310    case Primitive::kPrimInt:
1311      DCHECK(in.IsRegister());
1312      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
1313      break;
1314
1315    case Primitive::kPrimLong:
1316      DCHECK(in.IsRegisterPair());
1317      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1318      __ rsbs(out.AsRegisterPairLow<Register>(),
1319              in.AsRegisterPairLow<Register>(),
1320              ShifterOperand(0));
1321      // We cannot emit an RSC (Reverse Subtract with Carry)
1322      // instruction here, as it does not exist in the Thumb-2
1323      // instruction set.  We use the following approach
1324      // using SBC and SUB instead.
1325      //
1326      // out.hi = -C
1327      __ sbc(out.AsRegisterPairHigh<Register>(),
1328             out.AsRegisterPairHigh<Register>(),
1329             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1330      // out.hi = out.hi - in.hi
1331      __ sub(out.AsRegisterPairHigh<Register>(),
1332             out.AsRegisterPairHigh<Register>(),
1333             ShifterOperand(in.AsRegisterPairHigh<Register>()));
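      // Net effect: out.lo = -in.lo and out.hi = -in.hi - borrow, i.e. the full 64-bit
      // two's-complement negation.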
1334      break;
1335
1336    case Primitive::kPrimFloat:
1337      DCHECK(in.IsFpuRegister());
1338      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
1339      break;
1340
1341    case Primitive::kPrimDouble:
1342      DCHECK(in.IsFpuRegisterPair());
1343      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1344               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1345      break;
1346
1347    default:
1348      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1349  }
1350}
1351
1352void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1353  Primitive::Type result_type = conversion->GetResultType();
1354  Primitive::Type input_type = conversion->GetInputType();
1355  DCHECK_NE(result_type, input_type);
1356
1357  // The float-to-long and double-to-long type conversions rely on a
1358  // call to the runtime.
1359  LocationSummary::CallKind call_kind =
1360      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
1361       && result_type == Primitive::kPrimLong)
1362      ? LocationSummary::kCall
1363      : LocationSummary::kNoCall;
1364  LocationSummary* locations =
1365      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
1366
1367  switch (result_type) {
1368    case Primitive::kPrimByte:
1369      switch (input_type) {
1370        case Primitive::kPrimShort:
1371        case Primitive::kPrimInt:
1372        case Primitive::kPrimChar:
1373          // Processing a Dex `int-to-byte' instruction.
1374          locations->SetInAt(0, Location::RequiresRegister());
1375          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1376          break;
1377
1378        default:
1379          LOG(FATAL) << "Unexpected type conversion from " << input_type
1380                     << " to " << result_type;
1381      }
1382      break;
1383
1384    case Primitive::kPrimShort:
1385      switch (input_type) {
1386        case Primitive::kPrimByte:
1387        case Primitive::kPrimInt:
1388        case Primitive::kPrimChar:
1389          // Processing a Dex `int-to-short' instruction.
1390          locations->SetInAt(0, Location::RequiresRegister());
1391          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1392          break;
1393
1394        default:
1395          LOG(FATAL) << "Unexpected type conversion from " << input_type
1396                     << " to " << result_type;
1397      }
1398      break;
1399
1400    case Primitive::kPrimInt:
1401      switch (input_type) {
1402        case Primitive::kPrimLong:
1403          // Processing a Dex `long-to-int' instruction.
1404          locations->SetInAt(0, Location::Any());
1405          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1406          break;
1407
1408        case Primitive::kPrimFloat:
1409          // Processing a Dex `float-to-int' instruction.
1410          locations->SetInAt(0, Location::RequiresFpuRegister());
1411          locations->SetOut(Location::RequiresRegister());
1412          locations->AddTemp(Location::RequiresFpuRegister());
1413          break;
1414
1415        case Primitive::kPrimDouble:
1416          // Processing a Dex `double-to-int' instruction.
1417          locations->SetInAt(0, Location::RequiresFpuRegister());
1418          locations->SetOut(Location::RequiresRegister());
1419          locations->AddTemp(Location::RequiresFpuRegister());
1420          break;
1421
1422        default:
1423          LOG(FATAL) << "Unexpected type conversion from " << input_type
1424                     << " to " << result_type;
1425      }
1426      break;
1427
1428    case Primitive::kPrimLong:
1429      switch (input_type) {
1430        case Primitive::kPrimByte:
1431        case Primitive::kPrimShort:
1432        case Primitive::kPrimInt:
1433        case Primitive::kPrimChar:
1434          // Processing a Dex `int-to-long' instruction.
1435          locations->SetInAt(0, Location::RequiresRegister());
1436          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1437          break;
1438
1439        case Primitive::kPrimFloat: {
1440          // Processing a Dex `float-to-long' instruction.
1441          InvokeRuntimeCallingConvention calling_convention;
1442          locations->SetInAt(0, Location::FpuRegisterLocation(
1443              calling_convention.GetFpuRegisterAt(0)));
1444          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1445          break;
1446        }
1447
1448        case Primitive::kPrimDouble: {
1449          // Processing a Dex `double-to-long' instruction.
1450          InvokeRuntimeCallingConvention calling_convention;
1451          locations->SetInAt(0, Location::FpuRegisterPairLocation(
1452              calling_convention.GetFpuRegisterAt(0),
1453              calling_convention.GetFpuRegisterAt(1)));
1454          locations->SetOut(Location::RegisterPairLocation(R0, R1));
1455          break;
1456        }
1457
1458        default:
1459          LOG(FATAL) << "Unexpected type conversion from " << input_type
1460                     << " to " << result_type;
1461      }
1462      break;
1463
1464    case Primitive::kPrimChar:
1465      switch (input_type) {
1466        case Primitive::kPrimByte:
1467        case Primitive::kPrimShort:
1468        case Primitive::kPrimInt:
1469          // Processing a Dex `int-to-char' instruction.
1470          locations->SetInAt(0, Location::RequiresRegister());
1471          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1472          break;
1473
1474        default:
1475          LOG(FATAL) << "Unexpected type conversion from " << input_type
1476                     << " to " << result_type;
1477      }
1478      break;
1479
1480    case Primitive::kPrimFloat:
1481      switch (input_type) {
1482        case Primitive::kPrimByte:
1483        case Primitive::kPrimShort:
1484        case Primitive::kPrimInt:
1485        case Primitive::kPrimChar:
1486          // Processing a Dex `int-to-float' instruction.
1487          locations->SetInAt(0, Location::RequiresRegister());
1488          locations->SetOut(Location::RequiresFpuRegister());
1489          break;
1490
1491        case Primitive::kPrimLong:
1492          // Processing a Dex `long-to-float' instruction.
1493          locations->SetInAt(0, Location::RequiresRegister());
1494          locations->SetOut(Location::RequiresFpuRegister());
1495          locations->AddTemp(Location::RequiresRegister());
1496          locations->AddTemp(Location::RequiresRegister());
1497          locations->AddTemp(Location::RequiresFpuRegister());
1498          locations->AddTemp(Location::RequiresFpuRegister());
1499          break;
1500
1501        case Primitive::kPrimDouble:
1502          // Processing a Dex `double-to-float' instruction.
1503          locations->SetInAt(0, Location::RequiresFpuRegister());
1504          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1505          break;
1506
1507        default:
1508          LOG(FATAL) << "Unexpected type conversion from " << input_type
1509                     << " to " << result_type;
1510      }
1511      break;
1512
1513    case Primitive::kPrimDouble:
1514      switch (input_type) {
1515        case Primitive::kPrimByte:
1516        case Primitive::kPrimShort:
1517        case Primitive::kPrimInt:
1518        case Primitive::kPrimChar:
1519          // Processing a Dex `int-to-double' instruction.
1520          locations->SetInAt(0, Location::RequiresRegister());
1521          locations->SetOut(Location::RequiresFpuRegister());
1522          break;
1523
1524        case Primitive::kPrimLong:
1525          // Processing a Dex `long-to-double' instruction.
1526          locations->SetInAt(0, Location::RequiresRegister());
1527          locations->SetOut(Location::RequiresFpuRegister());
1528          locations->AddTemp(Location::RequiresRegister());
1529          locations->AddTemp(Location::RequiresRegister());
1530          locations->AddTemp(Location::RequiresFpuRegister());
1531          break;
1532
1533        case Primitive::kPrimFloat:
1534          // Processing a Dex `float-to-double' instruction.
1535          locations->SetInAt(0, Location::RequiresFpuRegister());
1536          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1537          break;
1538
1539        default:
1540          LOG(FATAL) << "Unexpected type conversion from " << input_type
1541                     << " to " << result_type;
1542      }
1543      break;
1544
1545    default:
1546      LOG(FATAL) << "Unexpected type conversion from " << input_type
1547                 << " to " << result_type;
1548  }
1549}
1550
1551void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1552  LocationSummary* locations = conversion->GetLocations();
1553  Location out = locations->Out();
1554  Location in = locations->InAt(0);
1555  Primitive::Type result_type = conversion->GetResultType();
1556  Primitive::Type input_type = conversion->GetInputType();
1557  DCHECK_NE(result_type, input_type);
1558  switch (result_type) {
1559    case Primitive::kPrimByte:
1560      switch (input_type) {
1561        case Primitive::kPrimShort:
1562        case Primitive::kPrimInt:
1563        case Primitive::kPrimChar:
1564          // Processing a Dex `int-to-byte' instruction.
1565          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
1566          break;
1567
1568        default:
1569          LOG(FATAL) << "Unexpected type conversion from " << input_type
1570                     << " to " << result_type;
1571      }
1572      break;
1573
1574    case Primitive::kPrimShort:
1575      switch (input_type) {
1576        case Primitive::kPrimByte:
1577        case Primitive::kPrimInt:
1578        case Primitive::kPrimChar:
1579          // Processing a Dex `int-to-short' instruction.
1580          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1581          break;
1582
1583        default:
1584          LOG(FATAL) << "Unexpected type conversion from " << input_type
1585                     << " to " << result_type;
1586      }
1587      break;
1588
1589    case Primitive::kPrimInt:
1590      switch (input_type) {
1591        case Primitive::kPrimLong:
1592          // Processing a Dex `long-to-int' instruction.
1593          DCHECK(out.IsRegister());
1594          if (in.IsRegisterPair()) {
1595            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1596          } else if (in.IsDoubleStackSlot()) {
1597            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
1598          } else {
1599            DCHECK(in.IsConstant());
1600            DCHECK(in.GetConstant()->IsLongConstant());
1601            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1602            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
1603          }
1604          break;
1605
1606        case Primitive::kPrimFloat: {
1607          // Processing a Dex `float-to-int' instruction.
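              // Note: vcvt (float to signed int) rounds toward zero, saturates on
              // overflow and converts NaN to 0, which lines up with Java's
              // narrowing (int) cast semantics.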
1608          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1609          __ vmovs(temp, in.AsFpuRegister<SRegister>());
1610          __ vcvtis(temp, temp);
1611          __ vmovrs(out.AsRegister<Register>(), temp);
1612          break;
1613        }
1614
1615        case Primitive::kPrimDouble: {
1616          // Processing a Dex `double-to-int' instruction.
1617          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
1618          DRegister temp_d = FromLowSToD(temp_s);
1619          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1620          __ vcvtid(temp_s, temp_d);
1621          __ vmovrs(out.AsRegister<Register>(), temp_s);
1622          break;
1623        }
1624
1625        default:
1626          LOG(FATAL) << "Unexpected type conversion from " << input_type
1627                     << " to " << result_type;
1628      }
1629      break;
1630
1631    case Primitive::kPrimLong:
1632      switch (input_type) {
1633        case Primitive::kPrimByte:
1634        case Primitive::kPrimShort:
1635        case Primitive::kPrimInt:
1636        case Primitive::kPrimChar:
1637          // Processing a Dex `int-to-long' instruction.
1638          DCHECK(out.IsRegisterPair());
1639          DCHECK(in.IsRegister());
1640          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
1641          // Sign extension.
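              // ASR #31 fills the high word with copies of the low word's sign bit.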
1642          __ Asr(out.AsRegisterPairHigh<Register>(),
1643                 out.AsRegisterPairLow<Register>(),
1644                 31);
1645          break;
1646
1647        case Primitive::kPrimFloat:
1648          // Processing a Dex `float-to-long' instruction.
1649          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
1650                                  conversion,
1651                                  conversion->GetDexPc());
1652          break;
1653
1654        case Primitive::kPrimDouble:
1655          // Processing a Dex `double-to-long' instruction.
1656          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
1657                                  conversion,
1658                                  conversion->GetDexPc());
1659          break;
1660
1661        default:
1662          LOG(FATAL) << "Unexpected type conversion from " << input_type
1663                     << " to " << result_type;
1664      }
1665      break;
1666
1667    case Primitive::kPrimChar:
1668      switch (input_type) {
1669        case Primitive::kPrimByte:
1670        case Primitive::kPrimShort:
1671        case Primitive::kPrimInt:
1672          // Processing a Dex `int-to-char' instruction.
1673          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
1674          break;
1675
1676        default:
1677          LOG(FATAL) << "Unexpected type conversion from " << input_type
1678                     << " to " << result_type;
1679      }
1680      break;
1681
1682    case Primitive::kPrimFloat:
1683      switch (input_type) {
1684        case Primitive::kPrimByte:
1685        case Primitive::kPrimShort:
1686        case Primitive::kPrimInt:
1687        case Primitive::kPrimChar: {
1688          // Processing a Dex `int-to-float' instruction.
1689          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
1690          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
1691          break;
1692        }
1693
1694        case Primitive::kPrimLong: {
1695          // Processing a Dex `long-to-float' instruction.
1696          Register low = in.AsRegisterPairLow<Register>();
1697          Register high = in.AsRegisterPairHigh<Register>();
1698          SRegister output = out.AsFpuRegister<SRegister>();
1699          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1700          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1701          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1702          DRegister temp1_d = FromLowSToD(temp1_s);
1703          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
1704          DRegister temp2_d = FromLowSToD(temp2_s);
1705
1706          // Operations use doubles for precision reasons (each 32-bit
1707          // half of a long fits in the 53-bit mantissa of a double,
1708          // but not in the 24-bit mantissa of a float).  This is
1709          // especially important for the low bits.  The result is
1710          // eventually converted to float.
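              // In effect: result = (float)((double)(int32_t)high * 2^32 + (double)(uint32_t)low).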
1711
1712          // temp1_d = int-to-double(high)
1713          __ vmovsr(temp1_s, high);
1714          __ vcvtdi(temp1_d, temp1_s);
1715          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1716          // as an immediate value into `temp2_d` does not work, as
1717          // this instruction only transfers 8 significant bits of its
1718          // immediate operand.  Instead, use two 32-bit core
1719          // registers to load `k2Pow32EncodingForDouble` into
1720          // `temp2_d`.
1721          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1722          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1723          __ vmovdrr(temp2_d, constant_low, constant_high);
1724          // temp1_d = temp1_d * 2^32
1725          __ vmuld(temp1_d, temp1_d, temp2_d);
1726          // temp2_d = unsigned-to-double(low)
1727          __ vmovsr(temp2_s, low);
1728          __ vcvtdu(temp2_d, temp2_s);
1729          // temp1_d = temp1_d + temp2_d
1730          __ vaddd(temp1_d, temp1_d, temp2_d);
1731          // output = double-to-float(temp1_d);
1732          __ vcvtsd(output, temp1_d);
1733          break;
1734        }
1735
1736        case Primitive::kPrimDouble:
1737          // Processing a Dex `double-to-float' instruction.
1738          __ vcvtsd(out.AsFpuRegister<SRegister>(),
1739                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1740          break;
1741
1742        default:
1743          LOG(FATAL) << "Unexpected type conversion from " << input_type
1744                     << " to " << result_type;
1745      }
1746      break;
1747
1748    case Primitive::kPrimDouble:
1749      switch (input_type) {
1750        case Primitive::kPrimByte:
1751        case Primitive::kPrimShort:
1752        case Primitive::kPrimInt:
1753        case Primitive::kPrimChar: {
1754          // Processing a Dex `int-to-double' instruction.
1755          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
1756          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1757                    out.AsFpuRegisterPairLow<SRegister>());
1758          break;
1759        }
1760
1761        case Primitive::kPrimLong: {
1762          // Processing a Dex `long-to-double' instruction.
1763          Register low = in.AsRegisterPairLow<Register>();
1764          Register high = in.AsRegisterPairHigh<Register>();
1765          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
1766          DRegister out_d = FromLowSToD(out_s);
1767          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
1768          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
1769          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
1770          DRegister temp_d = FromLowSToD(temp_s);
1771
1772          // out_d = int-to-double(high)
1773          __ vmovsr(out_s, high);
1774          __ vcvtdi(out_d, out_s);
1775          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
1776          // as an immediate value into `temp_d` does not work, as
1777          // this instruction only transfers 8 significant bits of its
1778          // immediate operand.  Instead, use two 32-bit core
1779          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
1780          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
1781          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
1782          __ vmovdrr(temp_d, constant_low, constant_high);
1783          // out_d = out_d * 2^32
1784          __ vmuld(out_d, out_d, temp_d);
1785          // temp_d = unsigned-to-double(low)
1786          __ vmovsr(temp_s, low);
1787          __ vcvtdu(temp_d, temp_s);
1788          // out_d = out_d + temp_d
1789          __ vaddd(out_d, out_d, temp_d);
1790          break;
1791        }
1792
1793        case Primitive::kPrimFloat:
1794          // Processing a Dex `float-to-double' instruction.
1795          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1796                    in.AsFpuRegister<SRegister>());
1797          break;
1798
1799        default:
1800          LOG(FATAL) << "Unexpected type conversion from " << input_type
1801                     << " to " << result_type;
1802      }
1803      break;
1804
1805    default:
1806      LOG(FATAL) << "Unexpected type conversion from " << input_type
1807                 << " to " << result_type;
1808  }
1809}
1810
1811void LocationsBuilderARM::VisitAdd(HAdd* add) {
1812  LocationSummary* locations =
1813      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1814  switch (add->GetResultType()) {
1815    case Primitive::kPrimInt: {
1816      locations->SetInAt(0, Location::RequiresRegister());
1817      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1818      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1819      break;
1820    }
1821
1822    case Primitive::kPrimLong: {
1823      locations->SetInAt(0, Location::RequiresRegister());
1824      locations->SetInAt(1, Location::RequiresRegister());
1825      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1826      break;
1827    }
1828
1829    case Primitive::kPrimFloat:
1830    case Primitive::kPrimDouble: {
1831      locations->SetInAt(0, Location::RequiresFpuRegister());
1832      locations->SetInAt(1, Location::RequiresFpuRegister());
1833      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1834      break;
1835    }
1836
1837    default:
1838      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1839  }
1840}
1841
1842void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1843  LocationSummary* locations = add->GetLocations();
1844  Location out = locations->Out();
1845  Location first = locations->InAt(0);
1846  Location second = locations->InAt(1);
1847  switch (add->GetResultType()) {
1848    case Primitive::kPrimInt:
1849      if (second.IsRegister()) {
1850        __ add(out.AsRegister<Register>(),
1851               first.AsRegister<Register>(),
1852               ShifterOperand(second.AsRegister<Register>()));
1853      } else {
1854        __ AddConstant(out.AsRegister<Register>(),
1855                       first.AsRegister<Register>(),
1856                       second.GetConstant()->AsIntConstant()->GetValue());
1857      }
1858      break;
1859
1860    case Primitive::kPrimLong: {
1861      DCHECK(second.IsRegisterPair());
1862      __ adds(out.AsRegisterPairLow<Register>(),
1863              first.AsRegisterPairLow<Register>(),
1864              ShifterOperand(second.AsRegisterPairLow<Register>()));
1865      __ adc(out.AsRegisterPairHigh<Register>(),
1866             first.AsRegisterPairHigh<Register>(),
1867             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1868      break;
1869    }
1870
1871    case Primitive::kPrimFloat:
1872      __ vadds(out.AsFpuRegister<SRegister>(),
1873               first.AsFpuRegister<SRegister>(),
1874               second.AsFpuRegister<SRegister>());
1875      break;
1876
1877    case Primitive::kPrimDouble:
1878      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1879               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1880               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1881      break;
1882
1883    default:
1884      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1885  }
1886}
1887
1888void LocationsBuilderARM::VisitSub(HSub* sub) {
1889  LocationSummary* locations =
1890      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1891  switch (sub->GetResultType()) {
1892    case Primitive::kPrimInt: {
1893      locations->SetInAt(0, Location::RequiresRegister());
1894      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1895      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1896      break;
1897    }
1898
1899    case Primitive::kPrimLong: {
1900      locations->SetInAt(0, Location::RequiresRegister());
1901      locations->SetInAt(1, Location::RequiresRegister());
1902      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1903      break;
1904    }
1905    case Primitive::kPrimFloat:
1906    case Primitive::kPrimDouble: {
1907      locations->SetInAt(0, Location::RequiresFpuRegister());
1908      locations->SetInAt(1, Location::RequiresFpuRegister());
1909      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1910      break;
1911    }
1912    default:
1913      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1914  }
1915}
1916
1917void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1918  LocationSummary* locations = sub->GetLocations();
1919  Location out = locations->Out();
1920  Location first = locations->InAt(0);
1921  Location second = locations->InAt(1);
1922  switch (sub->GetResultType()) {
1923    case Primitive::kPrimInt: {
1924      if (second.IsRegister()) {
1925        __ sub(out.AsRegister<Register>(),
1926               first.AsRegister<Register>(),
1927               ShifterOperand(second.AsRegister<Register>()));
1928      } else {
1929        __ AddConstant(out.AsRegister<Register>(),
1930                       first.AsRegister<Register>(),
1931                       -second.GetConstant()->AsIntConstant()->GetValue());
1932      }
1933      break;
1934    }
1935
1936    case Primitive::kPrimLong: {
1937      DCHECK(second.IsRegisterPair());
1938      __ subs(out.AsRegisterPairLow<Register>(),
1939              first.AsRegisterPairLow<Register>(),
1940              ShifterOperand(second.AsRegisterPairLow<Register>()));
1941      __ sbc(out.AsRegisterPairHigh<Register>(),
1942             first.AsRegisterPairHigh<Register>(),
1943             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1944      break;
1945    }
1946
1947    case Primitive::kPrimFloat: {
1948      __ vsubs(out.AsFpuRegister<SRegister>(),
1949               first.AsFpuRegister<SRegister>(),
1950               second.AsFpuRegister<SRegister>());
1951      break;
1952    }
1953
1954    case Primitive::kPrimDouble: {
1955      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1956               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1957               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1958      break;
1959    }
1960
1961
1962    default:
1963      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1964  }
1965}
1966
1967void LocationsBuilderARM::VisitMul(HMul* mul) {
1968  LocationSummary* locations =
1969      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1970  switch (mul->GetResultType()) {
1971    case Primitive::kPrimInt:
1972    case Primitive::kPrimLong:  {
1973      locations->SetInAt(0, Location::RequiresRegister());
1974      locations->SetInAt(1, Location::RequiresRegister());
1975      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1976      break;
1977    }
1978
1979    case Primitive::kPrimFloat:
1980    case Primitive::kPrimDouble: {
1981      locations->SetInAt(0, Location::RequiresFpuRegister());
1982      locations->SetInAt(1, Location::RequiresFpuRegister());
1983      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1984      break;
1985    }
1986
1987    default:
1988      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1989  }
1990}
1991
1992void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1993  LocationSummary* locations = mul->GetLocations();
1994  Location out = locations->Out();
1995  Location first = locations->InAt(0);
1996  Location second = locations->InAt(1);
1997  switch (mul->GetResultType()) {
1998    case Primitive::kPrimInt: {
1999      __ mul(out.AsRegister<Register>(),
2000             first.AsRegister<Register>(),
2001             second.AsRegister<Register>());
2002      break;
2003    }
2004    case Primitive::kPrimLong: {
2005      Register out_hi = out.AsRegisterPairHigh<Register>();
2006      Register out_lo = out.AsRegisterPairLow<Register>();
2007      Register in1_hi = first.AsRegisterPairHigh<Register>();
2008      Register in1_lo = first.AsRegisterPairLow<Register>();
2009      Register in2_hi = second.AsRegisterPairHigh<Register>();
2010      Register in2_lo = second.AsRegisterPairLow<Register>();
2011
2012      // Extra checks needed because register pairs such as R1_R2 may overlap.
2013      // The algorithm is wrong if out.hi aliases either in1.lo or in2.lo
2014      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
2015      DCHECK_NE(out_hi, in1_lo);
2016      DCHECK_NE(out_hi, in2_lo);
2017
2018      // input: in1 - 64 bits, in2 - 64 bits
2019      // output: out
2020      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
2021      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
2022      // parts: out.lo = (in1.lo * in2.lo)[31:0]
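          // Only the low 64 bits of the 128-bit product are needed, so the two cross
          // products can be computed modulo 2^32 with mul/mla; only in1.lo * in2.lo
          // needs a full 64-bit result (umull).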
2023
2024      // IP <- in1.lo * in2.hi
2025      __ mul(IP, in1_lo, in2_hi);
2026      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
2027      __ mla(out_hi, in1_hi, in2_lo, IP);
2028      // out.lo <- (in1.lo * in2.lo)[31:0];
2029      __ umull(out_lo, IP, in1_lo, in2_lo);
2030      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
2031      __ add(out_hi, out_hi, ShifterOperand(IP));
2032      break;
2033    }
2034
2035    case Primitive::kPrimFloat: {
2036      __ vmuls(out.AsFpuRegister<SRegister>(),
2037               first.AsFpuRegister<SRegister>(),
2038               second.AsFpuRegister<SRegister>());
2039      break;
2040    }
2041
2042    case Primitive::kPrimDouble: {
2043      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2044               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2045               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2046      break;
2047    }
2048
2049    default:
2050      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2051  }
2052}
2053
2054void LocationsBuilderARM::VisitDiv(HDiv* div) {
2055  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2056      ? LocationSummary::kCall
2057      : LocationSummary::kNoCall;
2058  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2059
2060  switch (div->GetResultType()) {
2061    case Primitive::kPrimInt: {
2062      locations->SetInAt(0, Location::RequiresRegister());
2063      locations->SetInAt(1, Location::RequiresRegister());
2064      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2065      break;
2066    }
2067    case Primitive::kPrimLong: {
2068      InvokeRuntimeCallingConvention calling_convention;
2069      locations->SetInAt(0, Location::RegisterPairLocation(
2070          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2071      locations->SetInAt(1, Location::RegisterPairLocation(
2072          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2073      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2074      break;
2075    }
2076    case Primitive::kPrimFloat:
2077    case Primitive::kPrimDouble: {
2078      locations->SetInAt(0, Location::RequiresFpuRegister());
2079      locations->SetInAt(1, Location::RequiresFpuRegister());
2080      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2081      break;
2082    }
2083
2084    default:
2085      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2086  }
2087}
2088
2089void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
2090  LocationSummary* locations = div->GetLocations();
2091  Location out = locations->Out();
2092  Location first = locations->InAt(0);
2093  Location second = locations->InAt(1);
2094
2095  switch (div->GetResultType()) {
2096    case Primitive::kPrimInt: {
2097      __ sdiv(out.AsRegister<Register>(),
2098              first.AsRegister<Register>(),
2099              second.AsRegister<Register>());
2100      break;
2101    }
2102
2103    case Primitive::kPrimLong: {
2104      InvokeRuntimeCallingConvention calling_convention;
2105      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2106      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2107      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2108      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2109      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2110      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2111
2112      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
2113      break;
2114    }
2115
2116    case Primitive::kPrimFloat: {
2117      __ vdivs(out.AsFpuRegister<SRegister>(),
2118               first.AsFpuRegister<SRegister>(),
2119               second.AsFpuRegister<SRegister>());
2120      break;
2121    }
2122
2123    case Primitive::kPrimDouble: {
2124      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
2125               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
2126               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
2127      break;
2128    }
2129
2130    default:
2131      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2132  }
2133}
2134
2135void LocationsBuilderARM::VisitRem(HRem* rem) {
2136  Primitive::Type type = rem->GetResultType();
2137  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
2138      ? LocationSummary::kNoCall
2139      : LocationSummary::kCall;
2140  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2141
2142  switch (type) {
2143    case Primitive::kPrimInt: {
2144      locations->SetInAt(0, Location::RequiresRegister());
2145      locations->SetInAt(1, Location::RequiresRegister());
2146      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2147      locations->AddTemp(Location::RequiresRegister());
2148      break;
2149    }
2150    case Primitive::kPrimLong: {
2151      InvokeRuntimeCallingConvention calling_convention;
2152      locations->SetInAt(0, Location::RegisterPairLocation(
2153          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2154      locations->SetInAt(1, Location::RegisterPairLocation(
2155          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2156      // The runtime helper puts the output in R2,R3.
2157      locations->SetOut(Location::RegisterPairLocation(R2, R3));
2158      break;
2159    }
2160    case Primitive::kPrimFloat: {
2161      InvokeRuntimeCallingConvention calling_convention;
2162      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2163      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2164      locations->SetOut(Location::FpuRegisterLocation(S0));
2165      break;
2166    }
2167
2168    case Primitive::kPrimDouble: {
2169      InvokeRuntimeCallingConvention calling_convention;
2170      locations->SetInAt(0, Location::FpuRegisterPairLocation(
2171          calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1)));
2172      locations->SetInAt(1, Location::FpuRegisterPairLocation(
2173          calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3)));
2174      locations->SetOut(Location::FpuRegisterPairLocation(S0, S1));
2175      break;
2176    }
2177
2178    default:
2179      LOG(FATAL) << "Unexpected rem type " << type;
2180  }
2181}
2182
2183void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
2184  LocationSummary* locations = rem->GetLocations();
2185  Location out = locations->Out();
2186  Location first = locations->InAt(0);
2187  Location second = locations->InAt(1);
2188
2189  Primitive::Type type = rem->GetResultType();
2190  switch (type) {
2191    case Primitive::kPrimInt: {
2192      Register reg1 = first.AsRegister<Register>();
2193      Register reg2 = second.AsRegister<Register>();
2194      Register temp = locations->GetTemp(0).AsRegister<Register>();
2195
2196      // temp = reg1 / reg2  (integer division)
2197      // temp = temp * reg2
2198      // dest = reg1 - temp
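          // There is no hardware remainder instruction; since sdiv truncates toward
          // zero, reg1 - (reg1 / reg2) * reg2 yields a remainder with the sign of
          // the dividend, as Java requires.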
2199      __ sdiv(temp, reg1, reg2);
2200      __ mul(temp, temp, reg2);
2201      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
2202      break;
2203    }
2204
2205    case Primitive::kPrimLong: {
2206      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
2207      break;
2208    }
2209
2210    case Primitive::kPrimFloat: {
2211      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
2212      break;
2213    }
2214
2215    case Primitive::kPrimDouble: {
2216      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
2217      break;
2218    }
2219
2220    default:
2221      LOG(FATAL) << "Unexpected rem type " << type;
2222  }
2223}
2224
2225void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2226  LocationSummary* locations =
2227      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2228  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2229  if (instruction->HasUses()) {
2230    locations->SetOut(Location::SameAsFirstInput());
2231  }
2232}
2233
2234void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2235  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
2236  codegen_->AddSlowPath(slow_path);
2237
2238  LocationSummary* locations = instruction->GetLocations();
2239  Location value = locations->InAt(0);
2240
2241  switch (instruction->GetType()) {
2242    case Primitive::kPrimInt: {
2243      if (value.IsRegister()) {
2244        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
2245        __ b(slow_path->GetEntryLabel(), EQ);
2246      } else {
2247        DCHECK(value.IsConstant()) << value;
2248        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2249          __ b(slow_path->GetEntryLabel());
2250        }
2251      }
2252      break;
2253    }
2254    case Primitive::kPrimLong: {
2255      if (value.IsRegisterPair()) {
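            // OR the two halves together: the 64-bit value is zero iff the result
            // is zero (orrs sets the flags).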
2256        __ orrs(IP,
2257                value.AsRegisterPairLow<Register>(),
2258                ShifterOperand(value.AsRegisterPairHigh<Register>()));
2259        __ b(slow_path->GetEntryLabel(), EQ);
2260      } else {
2261        DCHECK(value.IsConstant()) << value;
2262        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2263          __ b(slow_path->GetEntryLabel());
2264        }
2265      }
2266      break;
2267    default:
2268      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2269    }
2270  }
2271}
2272
2273void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
2274  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2275
2276  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
2277      ? LocationSummary::kCall
2278      : LocationSummary::kNoCall;
2279  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);
2280
2281  switch (op->GetResultType()) {
2282    case Primitive::kPrimInt: {
2283      locations->SetInAt(0, Location::RequiresRegister());
2284      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
2285      locations->SetOut(Location::RequiresRegister());
2286      break;
2287    }
2288    case Primitive::kPrimLong: {
2289      InvokeRuntimeCallingConvention calling_convention;
2290      locations->SetInAt(0, Location::RegisterPairLocation(
2291          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2292      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2293      // The runtime helper puts the output in R0,R1.
2294      locations->SetOut(Location::RegisterPairLocation(R0, R1));
2295      break;
2296    }
2297    default:
2298      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
2299  }
2300}
2301
2302void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
2303  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2304
2305  LocationSummary* locations = op->GetLocations();
2306  Location out = locations->Out();
2307  Location first = locations->InAt(0);
2308  Location second = locations->InAt(1);
2309
2310  Primitive::Type type = op->GetResultType();
2311  switch (type) {
2312    case Primitive::kPrimInt: {
2313      Register out_reg = out.AsRegister<Register>();
2314      Register first_reg = first.AsRegister<Register>();
2315      // Arm doesn't mask the shift count so we need to do it ourselves.
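          // (Java defines int shifts to use only the low five bits of the count,
          // i.e. count & 31, which is what kMaxIntShiftValue masks to.)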
2316      if (second.IsRegister()) {
2317        Register second_reg = second.AsRegister<Register>();
2318        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
2319        if (op->IsShl()) {
2320          __ Lsl(out_reg, first_reg, second_reg);
2321        } else if (op->IsShr()) {
2322          __ Asr(out_reg, first_reg, second_reg);
2323        } else {
2324          __ Lsr(out_reg, first_reg, second_reg);
2325        }
2326      } else {
2327        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
2328        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
2329        if (shift_value == 0) {  // Arm does not support shifting with 0 immediate.
2330          __ Mov(out_reg, first_reg);
2331        } else if (op->IsShl()) {
2332          __ Lsl(out_reg, first_reg, shift_value);
2333        } else if (op->IsShr()) {
2334          __ Asr(out_reg, first_reg, shift_value);
2335        } else {
2336          __ Lsr(out_reg, first_reg, shift_value);
2337        }
2338      }
2339      break;
2340    }
2341    case Primitive::kPrimLong: {
2342      // TODO: Inline the assembly instead of calling the runtime.
2343      InvokeRuntimeCallingConvention calling_convention;
2344      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2345      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2346      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
2347      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
2348      DCHECK_EQ(R1, out.AsRegisterPairHigh<Register>());
2349
2350      int32_t entry_point_offset;
2351      if (op->IsShl()) {
2352        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
2353      } else if (op->IsShr()) {
2354        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
2355      } else {
2356        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
2357      }
2358      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
2359      __ blx(LR);
2360      break;
2361    }
2362    default:
2363      LOG(FATAL) << "Unexpected operation type " << type;
2364  }
2365}
2366
2367void LocationsBuilderARM::VisitShl(HShl* shl) {
2368  HandleShift(shl);
2369}
2370
2371void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
2372  HandleShift(shl);
2373}
2374
2375void LocationsBuilderARM::VisitShr(HShr* shr) {
2376  HandleShift(shr);
2377}
2378
2379void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
2380  HandleShift(shr);
2381}
2382
2383void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
2384  HandleShift(ushr);
2385}
2386
2387void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
2388  HandleShift(ushr);
2389}
2390
2391void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
2392  LocationSummary* locations =
2393      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2394  InvokeRuntimeCallingConvention calling_convention;
2395  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2396  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2397  locations->SetOut(Location::RegisterLocation(R0));
2398}
2399
2400void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
2401  InvokeRuntimeCallingConvention calling_convention;
2402  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2403  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2404  codegen_->InvokeRuntime(
2405      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
2406}
2407
2408void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
2409  LocationSummary* locations =
2410      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2411  InvokeRuntimeCallingConvention calling_convention;
2412  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2413  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2414  locations->SetOut(Location::RegisterLocation(R0));
2415  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2416}
2417
2418void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
2419  InvokeRuntimeCallingConvention calling_convention;
2420  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
2421  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
2422  codegen_->InvokeRuntime(
2423      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
2424}
2425
2426void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
2427  LocationSummary* locations =
2428      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2429  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
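      // Stack-passed arguments are addressed relative to SP at method entry; once the
      // frame has been set up they sit past it, hence the frame-size adjustment below.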
2430  if (location.IsStackSlot()) {
2431    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2432  } else if (location.IsDoubleStackSlot()) {
2433    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2434  }
2435  locations->SetOut(location);
2436}
2437
2438void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
2439  // Nothing to do, the parameter is already at its location.
2440  UNUSED(instruction);
2441}
2442
2443void LocationsBuilderARM::VisitNot(HNot* not_) {
2444  LocationSummary* locations =
2445      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2446  locations->SetInAt(0, Location::RequiresRegister());
2447  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2448}
2449
2450void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
2451  LocationSummary* locations = not_->GetLocations();
2452  Location out = locations->Out();
2453  Location in = locations->InAt(0);
2454  switch (not_->InputAt(0)->GetType()) {
2455    case Primitive::kPrimInt:
2456      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
2457      break;
2458
2459    case Primitive::kPrimLong:
2460      __ mvn(out.AsRegisterPairLow<Register>(),
2461             ShifterOperand(in.AsRegisterPairLow<Register>()));
2462      __ mvn(out.AsRegisterPairHigh<Register>(),
2463             ShifterOperand(in.AsRegisterPairHigh<Register>()));
2464      break;
2465
2466    default:
2467      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2468  }
2469}
2470
2471void LocationsBuilderARM::VisitCompare(HCompare* compare) {
2472  LocationSummary* locations =
2473      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2474  switch (compare->InputAt(0)->GetType()) {
2475    case Primitive::kPrimLong: {
2476      locations->SetInAt(0, Location::RequiresRegister());
2477      locations->SetInAt(1, Location::RequiresRegister());
2478      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2479      break;
2480    }
2481    case Primitive::kPrimFloat:
2482    case Primitive::kPrimDouble: {
2483      locations->SetInAt(0, Location::RequiresFpuRegister());
2484      locations->SetInAt(1, Location::RequiresFpuRegister());
2485      locations->SetOut(Location::RequiresRegister());
2486      break;
2487    }
2488    default:
2489      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2490  }
2491}
2492
2493void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
2494  LocationSummary* locations = compare->GetLocations();
2495  Register out = locations->Out().AsRegister<Register>();
2496  Location left = locations->InAt(0);
2497  Location right = locations->InAt(1);
2498
2499  Label less, greater, done;
2500  Primitive::Type type = compare->InputAt(0)->GetType();
2501  switch (type) {
2502    case Primitive::kPrimLong: {
2503      __ cmp(left.AsRegisterPairHigh<Register>(),
2504             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
2505      __ b(&less, LT);
2506      __ b(&greater, GT);
2507      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
2508      __ LoadImmediate(out, 0);
2509      __ cmp(left.AsRegisterPairLow<Register>(),
2510             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
2511      break;
2512    }
2513    case Primitive::kPrimFloat:
2514    case Primitive::kPrimDouble: {
2515      __ LoadImmediate(out, 0);
2516      if (type == Primitive::kPrimFloat) {
2517        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
2518      } else {
2519        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
2520                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
2521      }
2522      __ vmstat();  // transfer FP status register to ARM APSR.
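          // With gt bias (Dex cmpg) an unordered comparison (NaN operand) produces 1;
          // with lt bias (Dex cmpl) it produces -1.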
2523      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
2524      break;
2525    }
2526    default:
2527      LOG(FATAL) << "Unexpected compare type " << type;
2528  }
2529  __ b(&done, EQ);
2530  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.
2531
2532  __ Bind(&greater);
2533  __ LoadImmediate(out, 1);
2534  __ b(&done);
2535
2536  __ Bind(&less);
2537  __ LoadImmediate(out, -1);
2538
2539  __ Bind(&done);
2540}
2541
2542void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
2543  LocationSummary* locations =
2544      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2545  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2546    locations->SetInAt(i, Location::Any());
2547  }
2548  locations->SetOut(Location::Any());
2549}
2550
2551void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
2552  UNUSED(instruction);
2553  LOG(FATAL) << "Unreachable";
2554}
2555
2556void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
2557  // TODO (ported from quick): revisit Arm barrier kinds
2558  DmbOptions flavour = DmbOptions::ISH;  // Quiet C++ 'may be used uninitialized' warnings.
2559  switch (kind) {
2560    case MemBarrierKind::kAnyStore:
2561    case MemBarrierKind::kLoadAny:
2562    case MemBarrierKind::kAnyAny: {
2563      flavour = DmbOptions::ISH;
2564      break;
2565    }
2566    case MemBarrierKind::kStoreStore: {
2567      flavour = DmbOptions::ISHST;
2568      break;
2569    }
2570    default:
2571      LOG(FATAL) << "Unexpected memory barrier " << kind;
2572  }
2573  __ dmb(flavour);
2574}
2575
2576void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
2577                                                         uint32_t offset,
2578                                                         Register out_lo,
2579                                                         Register out_hi) {
2580  if (offset != 0) {
2581    __ LoadImmediate(out_lo, offset);
2582    __ add(IP, addr, ShifterOperand(out_lo));
2583    addr = IP;
2584  }
2585  __ ldrexd(out_lo, out_hi, addr);
2586}
2587
2588void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
2589                                                          uint32_t offset,
2590                                                          Register value_lo,
2591                                                          Register value_hi,
2592                                                          Register temp1,
2593                                                          Register temp2,
2594                                                          HInstruction* instruction) {
2595  Label fail;
2596  if (offset != 0) {
2597    __ LoadImmediate(temp1, offset);
2598    __ add(IP, addr, ShifterOperand(temp1));
2599    addr = IP;
2600  }
2601  __ Bind(&fail);
2602  // We need a load followed by a store. (The address used in a STREX instruction must
2603  // be the same as the address in the most recently executed LDREX instruction.)
2604  __ ldrexd(temp1, temp2, addr);
2605  codegen_->MaybeRecordImplicitNullCheck(instruction);
2606  __ strexd(temp1, value_lo, value_hi, addr);
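      // strexd writes 0 to temp1 if the store succeeded and 1 if it did not, so
      // loop back and retry until the exclusive store takes effect.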
2607  __ cmp(temp1, ShifterOperand(0));
2608  __ b(&fail, NE);
2609}
2610
2611void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
2612  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2613
2614  LocationSummary* locations =
2615      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2616  locations->SetInAt(0, Location::RequiresRegister());
2617  locations->SetInAt(1, Location::RequiresRegister());
2618
2619
2620  Primitive::Type field_type = field_info.GetFieldType();
2621  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
2622  bool generate_volatile = field_info.IsVolatile()
2623      && is_wide
2624      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2625  // Temporary registers for the write barrier.
2626  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
2627  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2628    locations->AddTemp(Location::RequiresRegister());
2629    locations->AddTemp(Location::RequiresRegister());
2630  } else if (generate_volatile) {
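        // Without single-copy-atomic ldrd/strd, a volatile 64-bit store must be done
        // with an ldrexd/strexd loop, which needs extra temporary registers.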
2631    // The Arm encoding has some additional constraints for ldrexd/strexd:
2632    // - registers need to be consecutive
2633    // - the first register should be even but not R14.
2634    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2635    // enable Arm encoding.
2636    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2637
2638    locations->AddTemp(Location::RequiresRegister());
2639    locations->AddTemp(Location::RequiresRegister());
2640    if (field_type == Primitive::kPrimDouble) {
2641      // For doubles we need two more registers to copy the value.
2642      locations->AddTemp(Location::RegisterLocation(R2));
2643      locations->AddTemp(Location::RegisterLocation(R3));
2644    }
2645  }
2646}
2647
2648void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
2649                                                 const FieldInfo& field_info) {
2650  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2651
2652  LocationSummary* locations = instruction->GetLocations();
2653  Register base = locations->InAt(0).AsRegister<Register>();
2654  Location value = locations->InAt(1);
2655
2656  bool is_volatile = field_info.IsVolatile();
2657  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2658  Primitive::Type field_type = field_info.GetFieldType();
2659  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2660
2661  if (is_volatile) {
2662    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
2663  }
2664
2665  switch (field_type) {
2666    case Primitive::kPrimBoolean:
2667    case Primitive::kPrimByte: {
2668      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
2669      break;
2670    }
2671
2672    case Primitive::kPrimShort:
2673    case Primitive::kPrimChar: {
2674      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
2675      break;
2676    }
2677
2678    case Primitive::kPrimInt:
2679    case Primitive::kPrimNot: {
2680      __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
2681      break;
2682    }
2683
2684    case Primitive::kPrimLong: {
2685      if (is_volatile && !atomic_ldrd_strd) {
2686        GenerateWideAtomicStore(base, offset,
2687                                value.AsRegisterPairLow<Register>(),
2688                                value.AsRegisterPairHigh<Register>(),
2689                                locations->GetTemp(0).AsRegister<Register>(),
2690                                locations->GetTemp(1).AsRegister<Register>(),
2691                                instruction);
2692      } else {
2693        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
2694        codegen_->MaybeRecordImplicitNullCheck(instruction);
2695      }
2696      break;
2697    }
2698
2699    case Primitive::kPrimFloat: {
2700      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
2701      break;
2702    }
2703
2704    case Primitive::kPrimDouble: {
2705      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
2706      if (is_volatile && !atomic_ldrd_strd) {
2707        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
2708        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();
2709
2710        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);
2711
2712        GenerateWideAtomicStore(base, offset,
2713                                value_reg_lo,
2714                                value_reg_hi,
2715                                locations->GetTemp(2).AsRegister<Register>(),
2716                                locations->GetTemp(3).AsRegister<Register>(),
2717                                instruction);
2718      } else {
2719        __ StoreDToOffset(value_reg, base, offset);
2720        codegen_->MaybeRecordImplicitNullCheck(instruction);
2721      }
2722      break;
2723    }
2724
2725    case Primitive::kPrimVoid:
2726      LOG(FATAL) << "Unreachable type " << field_type;
2727      UNREACHABLE();
2728  }
2729
2730  // Longs and doubles are handled in the switch.
2731  if (field_type != Primitive::kPrimLong && field_type != Primitive::kPrimDouble) {
2732    codegen_->MaybeRecordImplicitNullCheck(instruction);
2733  }
2734
2735  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2736    Register temp = locations->GetTemp(0).AsRegister<Register>();
2737    Register card = locations->GetTemp(1).AsRegister<Register>();
2738    codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>());
2739  }
2740
2741  if (is_volatile) {
2742    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
2743  }
2744}
2745
2746void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
2747  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2748  LocationSummary* locations =
2749      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2750  locations->SetInAt(0, Location::RequiresRegister());
2751  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2752
2753  bool generate_volatile = field_info.IsVolatile()
2754      && (field_info.GetFieldType() == Primitive::kPrimDouble)
2755      && !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2756  if (generate_volatile) {
2757    // The Arm encoding has some additional constraints for ldrexd/strexd:
2758    // - registers need to be consecutive
2759    // - the first register should be even but not R14.
2760    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
2761    // enable Arm encoding.
2762    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
2763    locations->AddTemp(Location::RequiresRegister());
2764    locations->AddTemp(Location::RequiresRegister());
2765  }
2766}
2767
2768void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
2769                                                 const FieldInfo& field_info) {
2770  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2771
2772  LocationSummary* locations = instruction->GetLocations();
2773  Register base = locations->InAt(0).AsRegister<Register>();
2774  Location out = locations->Out();
2775  bool is_volatile = field_info.IsVolatile();
2776  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
2777  Primitive::Type field_type = field_info.GetFieldType();
2778  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
2779
2780  switch (field_type) {
2781    case Primitive::kPrimBoolean: {
2782      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
2783      break;
2784    }
2785
2786    case Primitive::kPrimByte: {
2787      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
2788      break;
2789    }
2790
2791    case Primitive::kPrimShort: {
2792      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
2793      break;
2794    }
2795
2796    case Primitive::kPrimChar: {
2797      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
2798      break;
2799    }
2800
2801    case Primitive::kPrimInt:
2802    case Primitive::kPrimNot: {
2803      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
2804      break;
2805    }
2806
2807    case Primitive::kPrimLong: {
2808      if (is_volatile && !atomic_ldrd_strd) {
2809        GenerateWideAtomicLoad(base, offset,
2810                               out.AsRegisterPairLow<Register>(),
2811                               out.AsRegisterPairHigh<Register>());
2812      } else {
2813        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
2814      }
2815      break;
2816    }
2817
2818    case Primitive::kPrimFloat: {
2819      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
2820      break;
2821    }
2822
2823    case Primitive::kPrimDouble: {
2824      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
2825      if (is_volatile && !atomic_ldrd_strd) {
2826        Register lo = locations->GetTemp(0).AsRegister<Register>();
2827        Register hi = locations->GetTemp(1).AsRegister<Register>();
2828        GenerateWideAtomicLoad(base, offset, lo, hi);
2829        codegen_->MaybeRecordImplicitNullCheck(instruction);
2830        __ vmovdrr(out_reg, lo, hi);
2831      } else {
2832        __ LoadDFromOffset(out_reg, base, offset);
2833        codegen_->MaybeRecordImplicitNullCheck(instruction);
2834      }
2835      break;
2836    }
2837
2838    case Primitive::kPrimVoid:
2839      LOG(FATAL) << "Unreachable type " << field_type;
2840      UNREACHABLE();
2841  }
2842
2843  // Doubles are handled in the switch.
2844  if (field_type != Primitive::kPrimDouble) {
2845    codegen_->MaybeRecordImplicitNullCheck(instruction);
2846  }
2847
2848  if (is_volatile) {
2849    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
2850  }
2851}
2852
2853void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2854  HandleFieldSet(instruction, instruction->GetFieldInfo());
2855}
2856
2857void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2858  HandleFieldSet(instruction, instruction->GetFieldInfo());
2859}
2860
2861void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2862  HandleFieldGet(instruction, instruction->GetFieldInfo());
2863}
2864
2865void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2866  HandleFieldGet(instruction, instruction->GetFieldInfo());
2867}
2868
2869void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2870  HandleFieldGet(instruction, instruction->GetFieldInfo());
2871}
2872
2873void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2874  HandleFieldGet(instruction, instruction->GetFieldInfo());
2875}
2876
2877void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2878  HandleFieldSet(instruction, instruction->GetFieldInfo());
2879}
2880
2881void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2882  HandleFieldSet(instruction, instruction->GetFieldInfo());
2883}
2884
2885void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
2886  LocationSummary* locations =
2887      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2888  locations->SetInAt(0, Location::RequiresRegister());
2889  if (instruction->HasUses()) {
2890    locations->SetOut(Location::SameAsFirstInput());
2891  }
2892}
2893
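// Implicit null checks: loading a word from [obj, #0] into IP faults for a null receiver;
// the pc info recorded below maps that fault back to the dex pc so the runtime's fault
// handler can raise the NullPointerException.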
2894void InstructionCodeGeneratorARM::GenerateImplicitNullCheck(HNullCheck* instruction) {
2895  if (codegen_->CanMoveNullCheckToUser(instruction)) {
2896    return;
2897  }
2898  Location obj = instruction->GetLocations()->InAt(0);
2899
2900  __ LoadFromOffset(kLoadWord, IP, obj.AsRegister<Register>(), 0);
2901  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2902}
2903
2904void InstructionCodeGeneratorARM::GenerateExplicitNullCheck(HNullCheck* instruction) {
2905  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
2906  codegen_->AddSlowPath(slow_path);
2907
2908  LocationSummary* locations = instruction->GetLocations();
2909  Location obj = locations->InAt(0);
2910
2911  __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
2912  __ b(slow_path->GetEntryLabel(), EQ);
2913}
2914
2915void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
2916  if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2917    GenerateImplicitNullCheck(instruction);
2918  } else {
2919    GenerateExplicitNullCheck(instruction);
2920  }
2921}
2922
2923void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
2924  LocationSummary* locations =
2925      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2926  locations->SetInAt(0, Location::RequiresRegister());
2927  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2928  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2929}
2930
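// Array loads share one addressing pattern: a constant index is folded into the immediate
// offset ((index << scale) + data offset); otherwise IP = obj + (index << scale) and the
// element is loaded at the data offset from IP.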
2931void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
2932  LocationSummary* locations = instruction->GetLocations();
2933  Register obj = locations->InAt(0).AsRegister<Register>();
2934  Location index = locations->InAt(1);
2935
2936  switch (instruction->GetType()) {
2937    case Primitive::kPrimBoolean: {
2938      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2939      Register out = locations->Out().AsRegister<Register>();
2940      if (index.IsConstant()) {
2941        size_t offset =
2942            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2943        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
2944      } else {
2945        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2946        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
2947      }
2948      break;
2949    }
2950
2951    case Primitive::kPrimByte: {
2952      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2953      Register out = locations->Out().AsRegister<Register>();
2954      if (index.IsConstant()) {
2955        size_t offset =
2956            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2957        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
2958      } else {
2959        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
2960        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
2961      }
2962      break;
2963    }
2964
2965    case Primitive::kPrimShort: {
2966      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2967      Register out = locations->Out().AsRegister<Register>();
2968      if (index.IsConstant()) {
2969        size_t offset =
2970            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2971        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
2972      } else {
2973        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2974        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
2975      }
2976      break;
2977    }
2978
2979    case Primitive::kPrimChar: {
2980      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2981      Register out = locations->Out().AsRegister<Register>();
2982      if (index.IsConstant()) {
2983        size_t offset =
2984            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2985        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2986      } else {
2987        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
2988        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2989      }
2990      break;
2991    }
2992
2993    case Primitive::kPrimInt:
2994    case Primitive::kPrimNot: {
2995      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2996      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2997      Register out = locations->Out().AsRegister<Register>();
2998      if (index.IsConstant()) {
2999        size_t offset =
3000            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3001        __ LoadFromOffset(kLoadWord, out, obj, offset);
3002      } else {
3003        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3004        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
3005      }
3006      break;
3007    }
3008
3009    case Primitive::kPrimLong: {
3010      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3011      Location out = locations->Out();
3012      if (index.IsConstant()) {
3013        size_t offset =
3014            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3015        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
3016      } else {
3017        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3018        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
3019      }
3020      break;
3021    }
3022
3023    case Primitive::kPrimFloat: {
3024      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3025      Location out = locations->Out();
3026      DCHECK(out.IsFpuRegister());
3027      if (index.IsConstant()) {
3028        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3029        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
3030      } else {
3031        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3032        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
3033      }
3034      break;
3035    }
3036
3037    case Primitive::kPrimDouble: {
3038      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3039      Location out = locations->Out();
3040      DCHECK(out.IsFpuRegisterPair());
3041      if (index.IsConstant()) {
3042        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3043        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3044      } else {
3045        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3046        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3047      }
3048      break;
3049    }
3050
3051    case Primitive::kPrimVoid:
3052      LOG(FATAL) << "Unreachable type " << instruction->GetType();
3053      UNREACHABLE();
3054  }
3055  codegen_->MaybeRecordImplicitNullCheck(instruction);
3056}
3057
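// Reference stores that need a type check are routed to the pAputObject runtime entry point,
// so their inputs go in the runtime calling convention registers; all other stores are emitted
// inline, with two temps reserved for the GC card mark when a write barrier is needed.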
3058void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
3059  Primitive::Type value_type = instruction->GetComponentType();
3060
3061  bool needs_write_barrier =
3062      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3063  bool needs_runtime_call = instruction->NeedsTypeCheck();
3064
3065  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3066      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
3067  if (needs_runtime_call) {
3068    InvokeRuntimeCallingConvention calling_convention;
3069    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3070    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3071    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3072  } else {
3073    locations->SetInAt(0, Location::RequiresRegister());
3074    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3075    locations->SetInAt(2, Location::RequiresRegister());
3076
3077    if (needs_write_barrier) {
3078      // Temporary registers for the write barrier.
3079      locations->AddTemp(Location::RequiresRegister());
3080      locations->AddTemp(Location::RequiresRegister());
3081    }
3082  }
3083}
3084
3085void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
3086  LocationSummary* locations = instruction->GetLocations();
3087  Register obj = locations->InAt(0).AsRegister<Register>();
3088  Location index = locations->InAt(1);
3089  Primitive::Type value_type = instruction->GetComponentType();
3090  bool needs_runtime_call = locations->WillCall();
3091  bool needs_write_barrier =
3092      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
3093
3094  switch (value_type) {
3095    case Primitive::kPrimBoolean:
3096    case Primitive::kPrimByte: {
3097      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
3098      Register value = locations->InAt(2).AsRegister<Register>();
3099      if (index.IsConstant()) {
3100        size_t offset =
3101            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
3102        __ StoreToOffset(kStoreByte, value, obj, offset);
3103      } else {
3104        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
3105        __ StoreToOffset(kStoreByte, value, IP, data_offset);
3106      }
3107      break;
3108    }
3109
3110    case Primitive::kPrimShort:
3111    case Primitive::kPrimChar: {
3112      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
3113      Register value = locations->InAt(2).AsRegister<Register>();
3114      if (index.IsConstant()) {
3115        size_t offset =
3116            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
3117        __ StoreToOffset(kStoreHalfword, value, obj, offset);
3118      } else {
3119        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
3120        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
3121      }
3122      break;
3123    }
3124
3125    case Primitive::kPrimInt:
3126    case Primitive::kPrimNot: {
3127      if (!needs_runtime_call) {
3128        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
3129        Register value = locations->InAt(2).AsRegister<Register>();
3130        if (index.IsConstant()) {
3131          size_t offset =
3132              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3133          __ StoreToOffset(kStoreWord, value, obj, offset);
3134        } else {
3135          DCHECK(index.IsRegister()) << index;
3136          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3137          __ StoreToOffset(kStoreWord, value, IP, data_offset);
3138        }
3139        codegen_->MaybeRecordImplicitNullCheck(instruction);
3140        if (needs_write_barrier) {
3141          DCHECK_EQ(value_type, Primitive::kPrimNot);
3142          Register temp = locations->GetTemp(0).AsRegister<Register>();
3143          Register card = locations->GetTemp(1).AsRegister<Register>();
3144          codegen_->MarkGCCard(temp, card, obj, value);
3145        }
3146      } else {
3147        DCHECK_EQ(value_type, Primitive::kPrimNot);
3148        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
3149                                instruction,
3150                                instruction->GetDexPc());
3151      }
3152      break;
3153    }
3154
3155    case Primitive::kPrimLong: {
3156      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3157      Location value = locations->InAt(2);
3158      if (index.IsConstant()) {
3159        size_t offset =
3160            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3161        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
3162      } else {
3163        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3164        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
3165      }
3166      break;
3167    }
3168
3169    case Primitive::kPrimFloat: {
3170      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
3171      Location value = locations->InAt(2);
3172      DCHECK(value.IsFpuRegister());
3173      if (index.IsConstant()) {
3174        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
3175        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
3176      } else {
3177        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
3178        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
3179      }
3180      break;
3181    }
3182
3183    case Primitive::kPrimDouble: {
3184      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
3185      Location value = locations->InAt(2);
3186      DCHECK(value.IsFpuRegisterPair());
3187      if (index.IsConstant()) {
3188        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3189        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
3190      } else {
3191        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
3192        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
3193      }
3194
3195      break;
3196    }
3197
3198    case Primitive::kPrimVoid:
3199      LOG(FATAL) << "Unreachable type " << value_type;
3200      UNREACHABLE();
3201  }
3202
3203  // Ints and objects are handled in the switch.
3204  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
3205    codegen_->MaybeRecordImplicitNullCheck(instruction);
3206  }
3207}
3208
3209void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
3210  LocationSummary* locations =
3211      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3212  locations->SetInAt(0, Location::RequiresRegister());
3213  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3214}
3215
3216void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
3217  LocationSummary* locations = instruction->GetLocations();
3218  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3219  Register obj = locations->InAt(0).AsRegister<Register>();
3220  Register out = locations->Out().AsRegister<Register>();
3221  __ LoadFromOffset(kLoadWord, out, obj, offset);
3222  codegen_->MaybeRecordImplicitNullCheck(instruction);
3223}
3224
3225void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3226  LocationSummary* locations =
3227      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3228  locations->SetInAt(0, Location::RequiresRegister());
3229  locations->SetInAt(1, Location::RequiresRegister());
3230  if (instruction->HasUses()) {
3231    locations->SetOut(Location::SameAsFirstInput());
3232  }
3233}
3234
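// The unsigned CS (higher-or-same) branch below covers both failure cases at once: a negative
// index becomes a large unsigned value, so index >= length and index < 0 both take the slow
// path.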
3235void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
3236  LocationSummary* locations = instruction->GetLocations();
3237  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
3238      instruction, locations->InAt(0), locations->InAt(1));
3239  codegen_->AddSlowPath(slow_path);
3240
3241  Register index = locations->InAt(0).AsRegister<Register>();
3242  Register length = locations->InAt(1).AsRegister<Register>();
3243
3244  __ cmp(index, ShifterOperand(length));
3245  __ b(slow_path->GetEntryLabel(), CS);
3246}
3247
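// Card marking: null stores are skipped, the card index is object >> kCardShift, and the card
// is addressed relative to the per-thread card table base. The strb stores the low byte of
// that base register as the card value; this relies on the runtime biasing the card table so
// that this byte equals the dirty-card marker (an assumption about runtime setup, not visible
// in this file).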
3248void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
3249  Label is_null;
3250  __ CompareAndBranchIfZero(value, &is_null);
3251  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
3252  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
3253  __ strb(card, Address(card, temp));
3254  __ Bind(&is_null);
3255}
3256
3257void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
3258  temp->SetLocations(nullptr);
3259}
3260
3261void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
3262  // Nothing to do, this is driven by the code generator.
3263  UNUSED(temp);
3264}
3265
3266void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
3267  UNUSED(instruction);
3268  LOG(FATAL) << "Unreachable";
3269}
3270
3271void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
3272  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3273}
3274
3275void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3276  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3277}
3278
3279void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
3280  HBasicBlock* block = instruction->GetBlock();
3281  if (block->GetLoopInformation() != nullptr) {
3282    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3283    // The back edge will generate the suspend check.
3284    return;
3285  }
3286  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3287    // The goto will generate the suspend check.
3288    return;
3289  }
3290  GenerateSuspendCheck(instruction, nullptr);
3291}
3292
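// Suspend checks read the 16-bit thread flags from TR; any non-zero flag diverts to the slow
// path. When emitted on a back edge (successor != nullptr), the fast path branches straight to
// the successor block instead of falling through.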
3293void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
3294                                                       HBasicBlock* successor) {
3295  SuspendCheckSlowPathARM* slow_path =
3296      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
3297  codegen_->AddSlowPath(slow_path);
3298
3299  __ LoadFromOffset(
3300      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
3301  __ cmp(IP, ShifterOperand(0));
3302  // TODO: Figure out the branch offsets and use cbz/cbnz.
3303  if (successor == nullptr) {
3304    __ b(slow_path->GetEntryLabel(), NE);
3305    __ Bind(slow_path->GetReturnLabel());
3306  } else {
3307    __ b(codegen_->GetLabelOf(successor), EQ);
3308    __ b(slow_path->GetEntryLabel());
3309  }
3310}
3311
3312ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
3313  return codegen_->GetAssembler();
3314}
3315
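// EmitMove lowers a single resolved move: register, stack-slot, FPU, and constant sources and
// destinations. 64-bit constants arrive as two 32-bit moves that share the same HConstant; the
// destination register's parity selects the low or high word (register pairs are expected to
// be allocated as an even/odd pair).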
3316void ParallelMoveResolverARM::EmitMove(size_t index) {
3317  MoveOperands* move = moves_.Get(index);
3318  Location source = move->GetSource();
3319  Location destination = move->GetDestination();
3320
3321  if (source.IsRegister()) {
3322    if (destination.IsRegister()) {
3323      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
3324    } else {
3325      DCHECK(destination.IsStackSlot());
3326      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
3327                       SP, destination.GetStackIndex());
3328    }
3329  } else if (source.IsStackSlot()) {
3330    if (destination.IsRegister()) {
3331      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
3332                        SP, source.GetStackIndex());
3333    } else if (destination.IsFpuRegister()) {
3334      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
3335    } else {
3336      DCHECK(destination.IsStackSlot());
3337      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3338      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3339    }
3340  } else if (source.IsFpuRegister()) {
3341    if (destination.IsFpuRegister()) {
3342      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
3343    } else {
3344      DCHECK(destination.IsStackSlot());
3345      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
3346    }
3347  } else if (source.IsDoubleStackSlot()) {
3348    DCHECK(destination.IsDoubleStackSlot()) << destination;
3349    __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
3350    __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3351    __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
3352    __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3353  } else {
3354    DCHECK(source.IsConstant()) << source;
3355    HInstruction* constant = source.GetConstant();
3356    if (constant->IsIntConstant()) {
3357      int32_t value = constant->AsIntConstant()->GetValue();
3358      if (destination.IsRegister()) {
3359        __ LoadImmediate(destination.AsRegister<Register>(), value);
3360      } else {
3361        DCHECK(destination.IsStackSlot());
3362        __ LoadImmediate(IP, value);
3363        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3364      }
3365    } else if (constant->IsLongConstant()) {
3366      int64_t value = constant->AsLongConstant()->GetValue();
3367      if (destination.IsRegister()) {
3368        // In the presence of long or double constants, the parallel move resolver
3369        // splits the move into two but keeps the same constant for both moves. Here,
3370        // we use the low or high part depending on which register this move goes to.
3371        if (destination.reg() % 2 == 0) {
3372          __ LoadImmediate(destination.AsRegister<Register>(), Low32Bits(value));
3373        } else {
3374          __ LoadImmediate(destination.AsRegister<Register>(), High32Bits(value));
3375        }
3376      } else {
3377        DCHECK(destination.IsDoubleStackSlot());
3378        __ LoadImmediate(IP, Low32Bits(value));
3379        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3380        __ LoadImmediate(IP, High32Bits(value));
3381        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3382      }
3383    } else if (constant->IsDoubleConstant()) {
3384      double value = constant->AsDoubleConstant()->GetValue();
3385      uint64_t int_value = bit_cast<uint64_t, double>(value);
3386      if (destination.IsFpuRegister()) {
3387        // In the presence of long or double constants, the parallel move resolver
3388        // splits the move into two but keeps the same constant for both moves. Here,
3389        // we use the low or high part depending on which register this move goes to.
3390        if (destination.reg() % 2 == 0) {
3391          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
3392                            bit_cast<float, uint32_t>(Low32Bits(int_value)));
3393        } else {
3394          __ LoadSImmediate(destination.AsFpuRegister<SRegister>(),
3395                            bit_cast<float, uint32_t>(High32Bits(int_value)));
3396        }
3397      } else {
3398        DCHECK(destination.IsDoubleStackSlot());
3399        __ LoadImmediate(IP, Low32Bits(int_value));
3400        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3401        __ LoadImmediate(IP, High32Bits(int_value));
3402        __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
3403      }
3404    } else {
3405      DCHECK(constant->IsFloatConstant()) << constant->DebugName();
3406      float value = constant->AsFloatConstant()->GetValue();
3407      if (destination.IsFpuRegister()) {
3408        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
3409      } else {
3410        DCHECK(destination.IsStackSlot());
3411        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
3412        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
3413      }
3414    }
3415  }
3416}
3417
3418void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
3419  __ Mov(IP, reg);
3420  __ LoadFromOffset(kLoadWord, reg, SP, mem);
3421  __ StoreToOffset(kStoreWord, IP, SP, mem);
3422}
3423
3424void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
3425  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
3426  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
3427  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
3428                    SP, mem1 + stack_offset);
3429  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
3430  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
3431                   SP, mem2 + stack_offset);
3432  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
3433}
3434
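// EmitSwap uses IP as the scratch register. For the FPU <-> stack case, the S register is first
// copied into IP, then reloaded from the stack slot, and the saved value in IP is written back
// to that slot.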
3435void ParallelMoveResolverARM::EmitSwap(size_t index) {
3436  MoveOperands* move = moves_.Get(index);
3437  Location source = move->GetSource();
3438  Location destination = move->GetDestination();
3439
3440  if (source.IsRegister() && destination.IsRegister()) {
3441    DCHECK_NE(source.AsRegister<Register>(), IP);
3442    DCHECK_NE(destination.AsRegister<Register>(), IP);
3443    __ Mov(IP, source.AsRegister<Register>());
3444    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
3445    __ Mov(destination.AsRegister<Register>(), IP);
3446  } else if (source.IsRegister() && destination.IsStackSlot()) {
3447    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3448  } else if (source.IsStackSlot() && destination.IsRegister()) {
3449    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3450  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3451    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3452  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
3453    __ vmovrs(IP, source.AsFpuRegister<SRegister>());
3454    __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>());
3455    __ vmovsr(destination.AsFpuRegister<SRegister>(), IP);
3456  } else if (source.IsFpuRegister() || destination.IsFpuRegister()) {
3457    SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>()
3458                                           : destination.AsFpuRegister<SRegister>();
3459    int mem = source.IsFpuRegister()
3460        ? destination.GetStackIndex()
3461        : source.GetStackIndex();
3462
3463    __ vmovrs(IP, reg);
3464    __ LoadSFromOffset(reg, SP, mem);
3465    __ StoreToOffset(kStoreWord, IP, SP, mem);
3466  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
3467    Exchange(source.GetStackIndex(), destination.GetStackIndex());
3468    Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize));
3469  } else {
3470    LOG(FATAL) << "Unimplemented" << source << " <-> " << destination;
3471  }
3472}
3473
3474void ParallelMoveResolverARM::SpillScratch(int reg) {
3475  __ Push(static_cast<Register>(reg));
3476}
3477
3478void ParallelMoveResolverARM::RestoreScratch(int reg) {
3479  __ Pop(static_cast<Register>(reg));
3480}
3481
3482void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
3483  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3484      ? LocationSummary::kCallOnSlowPath
3485      : LocationSummary::kNoCall;
3486  LocationSummary* locations =
3487      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3488  locations->SetOut(Location::RequiresRegister());
3489}
3490
3491void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
3492  Register out = cls->GetLocations()->Out().AsRegister<Register>();
3493  if (cls->IsReferrersClass()) {
3494    DCHECK(!cls->CanCallRuntime());
3495    DCHECK(!cls->MustGenerateClinitCheck());
3496    codegen_->LoadCurrentMethod(out);
3497    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3498  } else {
3499    DCHECK(cls->CanCallRuntime());
3500    codegen_->LoadCurrentMethod(out);
3501    __ LoadFromOffset(
3502        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
3503    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
3504
3505    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3506        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3507    codegen_->AddSlowPath(slow_path);
3508    __ cmp(out, ShifterOperand(0));
3509    __ b(slow_path->GetEntryLabel(), EQ);
3510    if (cls->MustGenerateClinitCheck()) {
3511      GenerateClassInitializationCheck(slow_path, out);
3512    } else {
3513      __ Bind(slow_path->GetExitLabel());
3514    }
3515  }
3516}
3517
3518void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
3519  LocationSummary* locations =
3520      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3521  locations->SetInAt(0, Location::RequiresRegister());
3522  if (check->HasUses()) {
3523    locations->SetOut(Location::SameAsFirstInput());
3524  }
3525}
3526
3527void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
3528  // We assume the class is not null.
3529  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
3530      check->GetLoadClass(), check, check->GetDexPc(), true);
3531  codegen_->AddSlowPath(slow_path);
3532  GenerateClassInitializationCheck(slow_path,
3533                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3534}
3535
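// A class is treated as initialized once its status field has reached kStatusInitialized; any
// lower status takes the slow path, which is expected to call into the runtime to perform (or
// wait for) initialization.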
3536void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
3537    SlowPathCodeARM* slow_path, Register class_reg) {
3538  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
3539  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
3540  __ b(slow_path->GetEntryLabel(), LT);
3541  // Even if the initialized flag is set, we may be in a situation where caches are not synced
3542  // properly. Therefore, we do a memory fence.
3543  __ dmb(ISH);
3544  __ Bind(slow_path->GetExitLabel());
3545}
3546
3547void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
3548  LocationSummary* locations =
3549      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3550  locations->SetOut(Location::RequiresRegister());
3551}
3552
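// String loads walk current method -> declaring class -> dex cache strings -> entry at the
// string index; a null entry means the string is not yet resolved and the slow path is taken.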
3553void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
3554  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
3555  codegen_->AddSlowPath(slow_path);
3556
3557  Register out = load->GetLocations()->Out().AsRegister<Register>();
3558  codegen_->LoadCurrentMethod(out);
3559  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
3560  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
3561  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
3562  __ cmp(out, ShifterOperand(0));
3563  __ b(slow_path->GetEntryLabel(), EQ);
3564  __ Bind(slow_path->GetExitLabel());
3565}
3566
3567void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
3568  LocationSummary* locations =
3569      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3570  locations->SetOut(Location::RequiresRegister());
3571}
3572
3573void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
3574  Register out = load->GetLocations()->Out().AsRegister<Register>();
3575  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
3576  __ LoadFromOffset(kLoadWord, out, TR, offset);
3577  __ LoadImmediate(IP, 0);
3578  __ StoreToOffset(kStoreWord, IP, TR, offset);
3579}
3580
3581void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
3582  LocationSummary* locations =
3583      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3584  InvokeRuntimeCallingConvention calling_convention;
3585  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3586}
3587
3588void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
3589  codegen_->InvokeRuntime(
3590      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
3591}
3592
3593void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
3594  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3595      ? LocationSummary::kNoCall
3596      : LocationSummary::kCallOnSlowPath;
3597  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3598  locations->SetInAt(0, Location::RequiresRegister());
3599  locations->SetInAt(1, Location::RequiresRegister());
3600  locations->SetOut(Location::RequiresRegister());
3601}
3602
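// instanceof fast path: a null object yields 0; for a final class a single class-pointer
// compare decides the result; otherwise a mismatch falls into the type-check slow path, which
// is expected to compute the full assignability answer into `out`.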
3603void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
3604  LocationSummary* locations = instruction->GetLocations();
3605  Register obj = locations->InAt(0).AsRegister<Register>();
3606  Register cls = locations->InAt(1).AsRegister<Register>();
3607  Register out = locations->Out().AsRegister<Register>();
3608  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3609  Label done, zero;
3610  SlowPathCodeARM* slow_path = nullptr;
3611
3612  // Return 0 if `obj` is null.
3613  // TODO: avoid this check if we know obj is not null.
3614  __ cmp(obj, ShifterOperand(0));
3615  __ b(&zero, EQ);
3616  // Compare the class of `obj` with `cls`.
3617  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
3618  __ cmp(out, ShifterOperand(cls));
3619  if (instruction->IsClassFinal()) {
3620    // Classes must be equal for the instanceof to succeed.
3621    __ b(&zero, NE);
3622    __ LoadImmediate(out, 1);
3623    __ b(&done);
3624  } else {
3625    // If the classes are not equal, we go into a slow path.
3626    DCHECK(locations->OnlyCallsOnSlowPath());
3627    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3628        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
3629    codegen_->AddSlowPath(slow_path);
3630    __ b(slow_path->GetEntryLabel(), NE);
3631    __ LoadImmediate(out, 1);
3632    __ b(&done);
3633  }
3634  __ Bind(&zero);
3635  __ LoadImmediate(out, 0);
3636  if (slow_path != nullptr) {
3637    __ Bind(slow_path->GetExitLabel());
3638  }
3639  __ Bind(&done);
3640}
3641
3642void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
3643  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3644      instruction, LocationSummary::kCallOnSlowPath);
3645  locations->SetInAt(0, Location::RequiresRegister());
3646  locations->SetInAt(1, Location::RequiresRegister());
3647  locations->AddTemp(Location::RequiresRegister());
3648}
3649
3650void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
3651  LocationSummary* locations = instruction->GetLocations();
3652  Register obj = locations->InAt(0).AsRegister<Register>();
3653  Register cls = locations->InAt(1).AsRegister<Register>();
3654  Register temp = locations->GetTemp(0).AsRegister<Register>();
3655  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3656
3657  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
3658      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
3659  codegen_->AddSlowPath(slow_path);
3660
3661  // TODO: avoid this check if we know obj is not null.
3662  __ cmp(obj, ShifterOperand(0));
3663  __ b(slow_path->GetExitLabel(), EQ);
3664  // Compare the class of `obj` with `cls`.
3665  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
3666  __ cmp(temp, ShifterOperand(cls));
3667  __ b(slow_path->GetEntryLabel(), NE);
3668  __ Bind(slow_path->GetExitLabel());
3669}
3670
3671void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3672  LocationSummary* locations =
3673      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3674  InvokeRuntimeCallingConvention calling_convention;
3675  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3676}
3677
3678void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
3679  codegen_->InvokeRuntime(instruction->IsEnter()
3680        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
3681      instruction,
3682      instruction->GetDexPc());
3683}
3684
3685void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3686void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3687void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3688
3689void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3690  LocationSummary* locations =
3691      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3692  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3693         || instruction->GetResultType() == Primitive::kPrimLong);
3694  locations->SetInAt(0, Location::RequiresRegister());
3695  locations->SetInAt(1, Location::RequiresRegister());
3696  Location::OutputOverlap output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong)
3697      ? Location::kOutputOverlap
3698      : Location::kNoOutputOverlap;
3699  locations->SetOut(Location::RequiresRegister(), output_overlaps);
3700}
3701
3702void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
3703  HandleBitwiseOperation(instruction);
3704}
3705
3706void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
3707  HandleBitwiseOperation(instruction);
3708}
3709
3710void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
3711  HandleBitwiseOperation(instruction);
3712}
3713
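// 32-bit and/or/xor are single instructions; the 64-bit forms operate on the low and high
// words separately. Because the low word of the result is written before the high words of the
// inputs are read, the builder above keeps a long output from sharing registers with its
// inputs (kOutputOverlap).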
3714void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
3715  LocationSummary* locations = instruction->GetLocations();
3716
3717  if (instruction->GetResultType() == Primitive::kPrimInt) {
3718    Register first = locations->InAt(0).AsRegister<Register>();
3719    Register second = locations->InAt(1).AsRegister<Register>();
3720    Register out = locations->Out().AsRegister<Register>();
3721    if (instruction->IsAnd()) {
3722      __ and_(out, first, ShifterOperand(second));
3723    } else if (instruction->IsOr()) {
3724      __ orr(out, first, ShifterOperand(second));
3725    } else {
3726      DCHECK(instruction->IsXor());
3727      __ eor(out, first, ShifterOperand(second));
3728    }
3729  } else {
3730    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3731    Location first = locations->InAt(0);
3732    Location second = locations->InAt(1);
3733    Location out = locations->Out();
3734    if (instruction->IsAnd()) {
3735      __ and_(out.AsRegisterPairLow<Register>(),
3736              first.AsRegisterPairLow<Register>(),
3737              ShifterOperand(second.AsRegisterPairLow<Register>()));
3738      __ and_(out.AsRegisterPairHigh<Register>(),
3739              first.AsRegisterPairHigh<Register>(),
3740              ShifterOperand(second.AsRegisterPairHigh<Register>()));
3741    } else if (instruction->IsOr()) {
3742      __ orr(out.AsRegisterPairLow<Register>(),
3743             first.AsRegisterPairLow<Register>(),
3744             ShifterOperand(second.AsRegisterPairLow<Register>()));
3745      __ orr(out.AsRegisterPairHigh<Register>(),
3746             first.AsRegisterPairHigh<Register>(),
3747             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3748    } else {
3749      DCHECK(instruction->IsXor());
3750      __ eor(out.AsRegisterPairLow<Register>(),
3751             first.AsRegisterPairLow<Register>(),
3752             ShifterOperand(second.AsRegisterPairLow<Register>()));
3753      __ eor(out.AsRegisterPairHigh<Register>(),
3754             first.AsRegisterPairHigh<Register>(),
3755             ShifterOperand(second.AsRegisterPairHigh<Register>()));
3756    }
3757  }
3758}
3759
3760}  // namespace arm
3761}  // namespace art
3762