code_generator_arm.cc revision f43083d560565aea46c602adb86423daeefe589d
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
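// In the VFP register file each double register Dk aliases the S-register
// pair S(2k)/S(2k+1), so the D register backing a double whose low half was
// allocated to an even S register is obtained by halving its index
// (e.g. S0 -> D0, S4 -> D2).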
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
39static constexpr bool kExplicitStackOverflowCheck = false;
40
41static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
42static constexpr int kCurrentMethodStackOffset = 0;
43
44static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
45static constexpr size_t kRuntimeParameterCoreRegistersLength =
46    arraysize(kRuntimeParameterCoreRegisters);
47static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
48static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
50class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
51 public:
52  InvokeRuntimeCallingConvention()
53      : CallingConvention(kRuntimeParameterCoreRegisters,
54                          kRuntimeParameterCoreRegistersLength,
55                          kRuntimeParameterFpuRegisters,
56                          kRuntimeParameterFpuRegistersLength) {}
57
58 private:
59  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
60};
61
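// `__` forwards to the ArmAssembler owned by the code generator, and
// QUICK_ENTRY_POINT(x) expands to the byte offset of entry point `x` inside
// the Thread object; InvokeRuntime() loads that slot relative to the thread
// register (TR) to reach the runtime.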
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()
64
65class SlowPathCodeARM : public SlowPathCode {
66 public:
67  SlowPathCodeARM() : entry_label_(), exit_label_() {}
68
69  Label* GetEntryLabel() { return &entry_label_; }
70  Label* GetExitLabel() { return &exit_label_; }
71
72 private:
73  Label entry_label_;
74  Label exit_label_;
75
76  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
77};
78
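// The slow paths below are emitted out of line; fast-path code branches to
// GetEntryLabel() for the exceptional case. Paths that call a throwing entry
// point (null check, div-by-zero, bounds check) never return, so only the
// non-throwing ones jump back through GetExitLabel().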
79class NullCheckSlowPathARM : public SlowPathCodeARM {
80 public:
81  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}
82
83  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
84    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
85    __ Bind(GetEntryLabel());
86    arm_codegen->InvokeRuntime(
87        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
88  }
89
90 private:
91  HNullCheck* const instruction_;
92  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
93};
94
95class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
96 public:
97  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}
98
99  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
101    __ Bind(GetEntryLabel());
102    arm_codegen->InvokeRuntime(
103        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
104  }
105
106 private:
107  HDivZeroCheck* const instruction_;
108  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
109};
110
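// Only used when kExplicitStackOverflowCheck is true: the pThrowStackOverflow
// entry point is entered by loading its address from the Thread directly into
// PC, so this path never returns and needs no exit label.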
111class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
112 public:
113  StackOverflowCheckSlowPathARM() {}
114
115  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
116    __ Bind(GetEntryLabel());
117    __ LoadFromOffset(kLoadWord, PC, TR,
118        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
119  }
120
121 private:
122  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
123};
124
125class SuspendCheckSlowPathARM : public SlowPathCodeARM {
126 public:
127  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
128      : instruction_(instruction), successor_(successor) {}
129
130  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
131    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
132    __ Bind(GetEntryLabel());
133    codegen->SaveLiveRegisters(instruction_->GetLocations());
134    arm_codegen->InvokeRuntime(
135        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
136    codegen->RestoreLiveRegisters(instruction_->GetLocations());
137    if (successor_ == nullptr) {
138      __ b(GetReturnLabel());
139    } else {
140      __ b(arm_codegen->GetLabelOf(successor_));
141    }
142  }
143
144  Label* GetReturnLabel() {
145    DCHECK(successor_ == nullptr);
146    return &return_label_;
147  }
148
149 private:
150  HSuspendCheck* const instruction_;
151  // If not null, the block to branch to after the suspend check.
152  HBasicBlock* const successor_;
153
154  // If `successor_` is null, the label to branch to after the suspend check.
155  Label return_label_;
156
157  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
158};
159
160class BoundsCheckSlowPathARM : public SlowPathCodeARM {
161 public:
162  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
163                         Location index_location,
164                         Location length_location)
165      : instruction_(instruction),
166        index_location_(index_location),
167        length_location_(length_location) {}
168
169  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
170    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
171    __ Bind(GetEntryLabel());
172    InvokeRuntimeCallingConvention calling_convention;
173    arm_codegen->Move32(
174        Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
175    arm_codegen->Move32(
176        Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
177    arm_codegen->InvokeRuntime(
178        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
179  }
180
181 private:
182  HBoundsCheck* const instruction_;
183  const Location index_location_;
184  const Location length_location_;
185
186  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
187};
188
189class LoadClassSlowPathARM : public SlowPathCodeARM {
190 public:
191  LoadClassSlowPathARM(HLoadClass* cls,
192                       HInstruction* at,
193                       uint32_t dex_pc,
194                       bool do_clinit)
195      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
196    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
197  }
198
199  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
200    LocationSummary* locations = at_->GetLocations();
201
202    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
203    __ Bind(GetEntryLabel());
204    codegen->SaveLiveRegisters(locations);
205
206    InvokeRuntimeCallingConvention calling_convention;
207    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
208    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
209    int32_t entry_point_offset = do_clinit_
210        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
211        : QUICK_ENTRY_POINT(pInitializeType);
212    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
213
214    // Move the class to the desired location.
215    if (locations->Out().IsValid()) {
216      DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
217      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
218    }
219    codegen->RestoreLiveRegisters(locations);
220    __ b(GetExitLabel());
221  }
222
223 private:
224  // The class this slow path will load.
225  HLoadClass* const cls_;
226
227  // The instruction where this slow path is happening.
228  // (Might be the load class or an initialization check).
229  HInstruction* const at_;
230
231  // The dex PC of `at_`.
232  const uint32_t dex_pc_;
233
234  // Whether to initialize the class.
235  const bool do_clinit_;
236
237  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
238};
239
240class LoadStringSlowPathARM : public SlowPathCodeARM {
241 public:
242  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
243
244  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
245    LocationSummary* locations = instruction_->GetLocations();
246    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
247
248    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
249    __ Bind(GetEntryLabel());
250    codegen->SaveLiveRegisters(locations);
251
252    InvokeRuntimeCallingConvention calling_convention;
253    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
254    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
255    arm_codegen->InvokeRuntime(
256        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
257    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
258
259    codegen->RestoreLiveRegisters(locations);
260    __ b(GetExitLabel());
261  }
262
263 private:
264  HLoadString* const instruction_;
265
266  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
267};
268
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
273
274inline Condition ARMCondition(IfCondition cond) {
275  switch (cond) {
276    case kCondEQ: return EQ;
277    case kCondNE: return NE;
278    case kCondLT: return LT;
279    case kCondLE: return LE;
280    case kCondGT: return GT;
281    case kCondGE: return GE;
282    default:
283      LOG(FATAL) << "Unknown if condition";
284  }
285  return EQ;        // Unreachable.
286}
287
288inline Condition ARMOppositeCondition(IfCondition cond) {
289  switch (cond) {
290    case kCondEQ: return NE;
291    case kCondNE: return EQ;
292    case kCondLT: return GE;
293    case kCondLE: return GT;
294    case kCondGT: return LE;
295    case kCondGE: return LT;
296    default:
297      LOG(FATAL) << "Unknown if condition";
298  }
299  return EQ;        // Unreachable.
300}
301
302void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
303  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
304}
305
306void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
307  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
308}
309
310size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
311  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
312  return kArmWordSize;
313}
314
315size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
316  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
317  return kArmWordSize;
318}
319
320CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
321    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
322      block_labels_(graph->GetArena(), 0),
323      location_builder_(graph, this),
324      instruction_visitor_(graph, this),
325      move_resolver_(graph->GetArena(), this),
326      assembler_(true) {}
327
328size_t CodeGeneratorARM::FrameEntrySpillSize() const {
329  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
330}
331
332Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
333  switch (type) {
334    case Primitive::kPrimLong: {
335      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
336      ArmManagedRegister pair =
337          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
338      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
339      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
340
341      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
342      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
343      UpdateBlockedPairRegisters();
344      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
345    }
346
347    case Primitive::kPrimByte:
348    case Primitive::kPrimBoolean:
349    case Primitive::kPrimChar:
350    case Primitive::kPrimShort:
351    case Primitive::kPrimInt:
352    case Primitive::kPrimNot: {
353      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
354      // Block all register pairs that contain `reg`.
355      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
356        ArmManagedRegister current =
357            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
358        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
359          blocked_register_pairs_[i] = true;
360        }
361      }
362      return Location::RegisterLocation(reg);
363    }
364
365    case Primitive::kPrimFloat: {
366      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
367      return Location::FpuRegisterLocation(reg);
368    }
369
370    case Primitive::kPrimDouble: {
371      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
372      DCHECK_EQ(reg % 2, 0);
373      return Location::FpuRegisterPairLocation(reg, reg + 1);
374    }
375
376    case Primitive::kPrimVoid:
377      LOG(FATAL) << "Unreachable type " << type;
378  }
379
380  return Location();
381}
382
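// The reservations below effectively leave the allocator with R0-R3, R6 and
// R7 (three usable core pairs, R0_R1, R2_R3 and R6_R7, since R1_R2 is blocked
// as a pair) plus the sixteen caller-save VFP registers S0-S15.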
383void CodeGeneratorARM::SetupBlockedRegisters() const {
  // Block the register pair used for Dalvik-style argument passing.
  blocked_register_pairs_[R1_R2] = true;
386
387  // Stack register, LR and PC are always reserved.
388  blocked_core_registers_[SP] = true;
389  blocked_core_registers_[LR] = true;
390  blocked_core_registers_[PC] = true;
391
392  // Reserve thread register.
393  blocked_core_registers_[TR] = true;
394
395  // Reserve temp register.
396  blocked_core_registers_[IP] = true;
397
398  // TODO: We currently don't use Quick's callee saved registers.
399  // We always save and restore R6 and R7 to make sure we can use three
400  // register pairs for long operations.
401  blocked_core_registers_[R4] = true;
402  blocked_core_registers_[R5] = true;
403  blocked_core_registers_[R8] = true;
404  blocked_core_registers_[R10] = true;
405  blocked_core_registers_[R11] = true;
406
407  blocked_fpu_registers_[S16] = true;
408  blocked_fpu_registers_[S17] = true;
409  blocked_fpu_registers_[S18] = true;
410  blocked_fpu_registers_[S19] = true;
411  blocked_fpu_registers_[S20] = true;
412  blocked_fpu_registers_[S21] = true;
413  blocked_fpu_registers_[S22] = true;
414  blocked_fpu_registers_[S23] = true;
415  blocked_fpu_registers_[S24] = true;
416  blocked_fpu_registers_[S25] = true;
417  blocked_fpu_registers_[S26] = true;
418  blocked_fpu_registers_[S27] = true;
419  blocked_fpu_registers_[S28] = true;
420  blocked_fpu_registers_[S29] = true;
421  blocked_fpu_registers_[S30] = true;
422  blocked_fpu_registers_[S31] = true;
423
424  UpdateBlockedPairRegisters();
425}
426
427void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
428  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
429    ArmManagedRegister current =
430        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
431    if (blocked_core_registers_[current.AsRegisterPairLow()]
432        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
433      blocked_register_pairs_[i] = true;
434    }
435  }
436}
437
438InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
439      : HGraphVisitor(graph),
440        assembler_(codegen->GetAssembler()),
441        codegen_(codegen) {}
442
443void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
445  if (!skip_overflow_check) {
446    if (kExplicitStackOverflowCheck) {
447      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
448      AddSlowPath(slow_path);
449
450      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
451      __ cmp(SP, ShifterOperand(IP));
452      __ b(slow_path->GetEntryLabel(), CC);
453    } else {
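      // Implicit stack-overflow check: probe the address that lies
      // GetStackOverflowReservedBytes(kArm) below SP. If the stack has
      // already overflowed, the load faults and the runtime's fault handler
      // turns it into a StackOverflowError using the PC info recorded below.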
454      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
455      __ LoadFromOffset(kLoadWord, IP, IP, 0);
456      RecordPcInfo(nullptr, 0);
457    }
458  }
459
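  // Frame layout built below: R6, R7 and the return address (LR) are pushed
  // first, SP is then lowered to make room for spills and outgoing arguments,
  // and finally the ArtMethod* (passed in R0) is stored at [SP].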
460  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
461  __ PushList(1 << LR | 1 << R6 | 1 << R7);
462
463  // The return PC has already been pushed on the stack.
464  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
465  __ StoreToOffset(kStoreWord, R0, SP, 0);
466}
467
468void CodeGeneratorARM::GenerateFrameExit() {
469  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
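  // Popping the saved LR slot directly into PC performs the method return.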
470  __ PopList(1 << PC | 1 << R6 | 1 << R7);
471}
472
473void CodeGeneratorARM::Bind(HBasicBlock* block) {
474  __ Bind(GetLabelOf(block));
475}
476
477Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
478  switch (load->GetType()) {
479    case Primitive::kPrimLong:
480    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
483
484    case Primitive::kPrimInt:
485    case Primitive::kPrimNot:
486    case Primitive::kPrimFloat:
487      return Location::StackSlot(GetStackSlot(load->GetLocal()));
488
489    case Primitive::kPrimBoolean:
490    case Primitive::kPrimByte:
491    case Primitive::kPrimChar:
492    case Primitive::kPrimShort:
493    case Primitive::kPrimVoid:
494      LOG(FATAL) << "Unexpected type " << load->GetType();
495  }
496
497  LOG(FATAL) << "Unreachable";
498  return Location();
499}
500
501Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
502  switch (type) {
503    case Primitive::kPrimBoolean:
504    case Primitive::kPrimByte:
505    case Primitive::kPrimChar:
506    case Primitive::kPrimShort:
507    case Primitive::kPrimInt:
508    case Primitive::kPrimNot: {
509      uint32_t index = gp_index_++;
510      uint32_t stack_index = stack_index_++;
511      if (index < calling_convention.GetNumberOfRegisters()) {
512        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
513      } else {
514        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
515      }
516    }
517
518    case Primitive::kPrimLong: {
519      uint32_t index = gp_index_;
520      uint32_t stack_index = stack_index_;
521      gp_index_ += 2;
522      stack_index_ += 2;
523      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
524        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
525            calling_convention.GetRegisterPairAt(index));
526        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
527      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
528        return Location::QuickParameter(index, stack_index);
529      } else {
530        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
531      }
532    }
533
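    // Floating-point arguments follow an AAPCS-style back-filling scheme:
    // doubles take an aligned (even/odd) S-register pair, and a single S
    // register skipped for alignment can still be claimed by a later float.
    // For example, a (float, double, float) signature gets S0, then S2/S3,
    // then back-fills S1.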
534    case Primitive::kPrimFloat: {
535      uint32_t stack_index = stack_index_++;
536      if (float_index_ % 2 == 0) {
537        float_index_ = std::max(double_index_, float_index_);
538      }
539      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
540        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
541      } else {
542        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
543      }
544    }
545
546    case Primitive::kPrimDouble: {
547      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
548      uint32_t stack_index = stack_index_;
549      stack_index_ += 2;
550      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
551        uint32_t index = double_index_;
552        double_index_ += 2;
553        return Location::FpuRegisterPairLocation(
554          calling_convention.GetFpuRegisterAt(index),
555          calling_convention.GetFpuRegisterAt(index + 1));
556      } else {
557        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
558      }
559    }
560
561    case Primitive::kPrimVoid:
562      LOG(FATAL) << "Unexpected parameter type " << type;
563      break;
564  }
565  return Location();
566}
567
568Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
569  switch (type) {
570    case Primitive::kPrimBoolean:
571    case Primitive::kPrimByte:
572    case Primitive::kPrimChar:
573    case Primitive::kPrimShort:
574    case Primitive::kPrimInt:
575    case Primitive::kPrimNot: {
576      return Location::RegisterLocation(R0);
577    }
578
579    case Primitive::kPrimFloat: {
580      return Location::FpuRegisterLocation(S0);
581    }
582
583    case Primitive::kPrimLong: {
584      return Location::RegisterPairLocation(R0, R1);
585    }
586
587    case Primitive::kPrimDouble: {
588      return Location::FpuRegisterPairLocation(S0, S1);
589    }
590
591    case Primitive::kPrimVoid:
592      return Location();
593  }
594  UNREACHABLE();
595  return Location();
596}
597
598void CodeGeneratorARM::Move32(Location destination, Location source) {
599  if (source.Equals(destination)) {
600    return;
601  }
602  if (destination.IsRegister()) {
603    if (source.IsRegister()) {
604      __ Mov(destination.As<Register>(), source.As<Register>());
605    } else if (source.IsFpuRegister()) {
606      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
607    } else {
608      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
609    }
610  } else if (destination.IsFpuRegister()) {
611    if (source.IsRegister()) {
612      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
613    } else if (source.IsFpuRegister()) {
614      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
615    } else {
616      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
617    }
618  } else {
619    DCHECK(destination.IsStackSlot());
620    if (source.IsRegister()) {
621      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
622    } else if (source.IsFpuRegister()) {
623      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
624    } else {
625      DCHECK(source.IsStackSlot());
626      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
627      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
628    }
629  }
630}
631
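// A long passed as a "Quick parameter" is split: its low word lives in a core
// register of the calling convention and its high word in the caller's
// outgoing argument area (hence the `+ GetFrameSize()` when it is read below),
// so Move64 has to handle that mixed register/stack case explicitly.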
632void CodeGeneratorARM::Move64(Location destination, Location source) {
633  if (source.Equals(destination)) {
634    return;
635  }
636  if (destination.IsRegisterPair()) {
637    if (source.IsRegisterPair()) {
638      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
639      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
640    } else if (source.IsFpuRegister()) {
641      UNIMPLEMENTED(FATAL);
642    } else if (source.IsQuickParameter()) {
643      uint16_t register_index = source.GetQuickParameterRegisterIndex();
644      uint16_t stack_index = source.GetQuickParameterStackIndex();
645      InvokeDexCallingConvention calling_convention;
646      __ Mov(destination.AsRegisterPairLow<Register>(),
647             calling_convention.GetRegisterAt(register_index));
648      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
649             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
650    } else {
651      DCHECK(source.IsDoubleStackSlot());
652      if (destination.AsRegisterPairLow<Register>() == R1) {
653        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
654        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
655        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
656      } else {
657        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
658                          SP, source.GetStackIndex());
659      }
660    }
661  } else if (destination.IsFpuRegisterPair()) {
662    if (source.IsDoubleStackSlot()) {
663      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
664                         SP,
665                         source.GetStackIndex());
666    } else {
667      UNIMPLEMENTED(FATAL);
668    }
669  } else if (destination.IsQuickParameter()) {
670    InvokeDexCallingConvention calling_convention;
671    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
672    uint16_t stack_index = destination.GetQuickParameterStackIndex();
673    if (source.IsRegisterPair()) {
674      __ Mov(calling_convention.GetRegisterAt(register_index),
675             source.AsRegisterPairLow<Register>());
676      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
677             SP, calling_convention.GetStackOffsetOf(stack_index + 1));
678    } else if (source.IsFpuRegister()) {
679      UNIMPLEMENTED(FATAL);
680    } else {
681      DCHECK(source.IsDoubleStackSlot());
682      __ LoadFromOffset(
683          kLoadWord, calling_convention.GetRegisterAt(register_index), SP, source.GetStackIndex());
684      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
685      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(stack_index + 1));
686    }
687  } else {
688    DCHECK(destination.IsDoubleStackSlot());
689    if (source.IsRegisterPair()) {
690      if (source.AsRegisterPairLow<Register>() == R1) {
691        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
692        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
693        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
694      } else {
695        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
696                         SP, destination.GetStackIndex());
697      }
698    } else if (source.IsQuickParameter()) {
699      InvokeDexCallingConvention calling_convention;
700      uint16_t register_index = source.GetQuickParameterRegisterIndex();
701      uint16_t stack_index = source.GetQuickParameterStackIndex();
702      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
703             SP, destination.GetStackIndex());
704      __ LoadFromOffset(kLoadWord, R0,
705             SP, calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize());
706      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
707    } else if (source.IsFpuRegisterPair()) {
708      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
709                        SP,
710                        destination.GetStackIndex());
711    } else {
712      DCHECK(source.IsDoubleStackSlot());
713      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
714      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
715      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
716      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
717    }
718  }
719}
720
721void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
722  LocationSummary* locations = instruction->GetLocations();
723  if (locations != nullptr && locations->Out().Equals(location)) {
724    return;
725  }
726
727  if (instruction->IsIntConstant()) {
728    int32_t value = instruction->AsIntConstant()->GetValue();
729    if (location.IsRegister()) {
730      __ LoadImmediate(location.As<Register>(), value);
731    } else {
732      DCHECK(location.IsStackSlot());
733      __ LoadImmediate(IP, value);
734      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
735    }
736  } else if (instruction->IsLongConstant()) {
737    int64_t value = instruction->AsLongConstant()->GetValue();
738    if (location.IsRegisterPair()) {
739      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
740      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
741    } else {
742      DCHECK(location.IsDoubleStackSlot());
743      __ LoadImmediate(IP, Low32Bits(value));
744      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
745      __ LoadImmediate(IP, High32Bits(value));
746      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
747    }
748  } else if (instruction->IsLoadLocal()) {
749    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
750    switch (instruction->GetType()) {
751      case Primitive::kPrimBoolean:
752      case Primitive::kPrimByte:
753      case Primitive::kPrimChar:
754      case Primitive::kPrimShort:
755      case Primitive::kPrimInt:
756      case Primitive::kPrimNot:
757      case Primitive::kPrimFloat:
758        Move32(location, Location::StackSlot(stack_slot));
759        break;
760
761      case Primitive::kPrimLong:
762      case Primitive::kPrimDouble:
763        Move64(location, Location::DoubleStackSlot(stack_slot));
764        break;
765
766      default:
767        LOG(FATAL) << "Unexpected type " << instruction->GetType();
768    }
769  } else if (instruction->IsTemporary()) {
770    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
771    Move32(location, temp_location);
772  } else {
773    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
774    switch (instruction->GetType()) {
775      case Primitive::kPrimBoolean:
776      case Primitive::kPrimByte:
777      case Primitive::kPrimChar:
778      case Primitive::kPrimShort:
779      case Primitive::kPrimNot:
780      case Primitive::kPrimInt:
781      case Primitive::kPrimFloat:
782        Move32(location, locations->Out());
783        break;
784
785      case Primitive::kPrimLong:
786      case Primitive::kPrimDouble:
787        Move64(location, locations->Out());
788        break;
789
790      default:
791        LOG(FATAL) << "Unexpected type " << instruction->GetType();
792    }
793  }
794}
795
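// Calls into the Quick runtime: the entry point is loaded from the current
// Thread (via TR) at `entry_point_offset` and invoked with blx, and the PC
// information recorded at `dex_pc` lets the runtime describe this call site
// (e.g. for exception delivery or GC).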
796void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
797                                     HInstruction* instruction,
798                                     uint32_t dex_pc) {
799  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
800  __ blx(LR);
801  RecordPcInfo(instruction, dex_pc);
802  DCHECK(instruction->IsSuspendCheck()
803      || instruction->IsBoundsCheck()
804      || instruction->IsNullCheck()
805      || instruction->IsDivZeroCheck()
806      || !IsLeafMethod());
807}
808
809void LocationsBuilderARM::VisitGoto(HGoto* got) {
810  got->SetLocations(nullptr);
811}
812
813void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
814  HBasicBlock* successor = got->GetSuccessor();
815  DCHECK(!successor->IsExitBlock());
816
817  HBasicBlock* block = got->GetBlock();
818  HInstruction* previous = got->GetPrevious();
819
820  HLoopInformation* info = block->GetLoopInformation();
821  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
822    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
823    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
824    return;
825  }
826
827  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
828    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
829  }
830  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
831    __ b(codegen_->GetLabelOf(successor));
832  }
833}
834
835void LocationsBuilderARM::VisitExit(HExit* exit) {
836  exit->SetLocations(nullptr);
837}
838
839void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
840  UNUSED(exit);
841  if (kIsDebugBuild) {
842    __ Comment("Unreachable");
843    __ bkpt(0);
844  }
845}
846
847void LocationsBuilderARM::VisitIf(HIf* if_instr) {
848  LocationSummary* locations =
849      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
850  HInstruction* cond = if_instr->InputAt(0);
851  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
852    locations->SetInAt(0, Location::RequiresRegister());
853  }
854}
855
856void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
857  HInstruction* cond = if_instr->InputAt(0);
858  if (cond->IsIntConstant()) {
859    // Constant condition, statically compared against 1.
860    int32_t cond_value = cond->AsIntConstant()->GetValue();
861    if (cond_value == 1) {
862      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
863                                     if_instr->IfTrueSuccessor())) {
864        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
865      }
866      return;
867    } else {
868      DCHECK_EQ(cond_value, 0);
869    }
870  } else {
871    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
872      // Condition has been materialized, compare the output to 0
873      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
874      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
875             ShifterOperand(0));
876      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
877    } else {
878      // Condition has not been materialized, use its inputs as the
879      // comparison and its condition as the branch condition.
880      LocationSummary* locations = cond->GetLocations();
881      if (locations->InAt(1).IsRegister()) {
882        __ cmp(locations->InAt(0).As<Register>(),
883               ShifterOperand(locations->InAt(1).As<Register>()));
884      } else {
885        DCHECK(locations->InAt(1).IsConstant());
886        int32_t value =
887            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
888        ShifterOperand operand;
889        if (ShifterOperand::CanHoldArm(value, &operand)) {
890          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
891        } else {
892          Register temp = IP;
893          __ LoadImmediate(temp, value);
894          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
895        }
896      }
897      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
898           ARMCondition(cond->AsCondition()->GetCondition()));
899    }
900  }
901  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
902                                 if_instr->IfFalseSuccessor())) {
903    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
904  }
905}
906
907
908void LocationsBuilderARM::VisitCondition(HCondition* comp) {
909  LocationSummary* locations =
910      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
911  locations->SetInAt(0, Location::RequiresRegister());
912  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
913  if (comp->NeedsMaterialization()) {
914    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
915  }
916}
917
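// A condition only needs code of its own when its boolean value is actually
// consumed (NeedsMaterialization()). When it is not materialized, its value
// feeds an HIf directly and VisitIf() above emits the compare-and-branch
// itself; otherwise an IT block below writes 1 or 0 into the output register.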
918void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
919  if (!comp->NeedsMaterialization()) return;
920
921  LocationSummary* locations = comp->GetLocations();
922  if (locations->InAt(1).IsRegister()) {
923    __ cmp(locations->InAt(0).As<Register>(),
924           ShifterOperand(locations->InAt(1).As<Register>()));
925  } else {
926    DCHECK(locations->InAt(1).IsConstant());
927    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
928    ShifterOperand operand;
929    if (ShifterOperand::CanHoldArm(value, &operand)) {
930      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
931    } else {
932      Register temp = IP;
933      __ LoadImmediate(temp, value);
934      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
935    }
936  }
937  __ it(ARMCondition(comp->GetCondition()), kItElse);
938  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
939         ARMCondition(comp->GetCondition()));
940  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
941         ARMOppositeCondition(comp->GetCondition()));
942}
943
944void LocationsBuilderARM::VisitEqual(HEqual* comp) {
945  VisitCondition(comp);
946}
947
948void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
949  VisitCondition(comp);
950}
951
952void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
953  VisitCondition(comp);
954}
955
956void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
957  VisitCondition(comp);
958}
959
960void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
961  VisitCondition(comp);
962}
963
964void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
965  VisitCondition(comp);
966}
967
968void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
969  VisitCondition(comp);
970}
971
972void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
973  VisitCondition(comp);
974}
975
976void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
977  VisitCondition(comp);
978}
979
980void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
981  VisitCondition(comp);
982}
983
984void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
985  VisitCondition(comp);
986}
987
988void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
989  VisitCondition(comp);
990}
991
992void LocationsBuilderARM::VisitLocal(HLocal* local) {
993  local->SetLocations(nullptr);
994}
995
996void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
997  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
998}
999
1000void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
1001  load->SetLocations(nullptr);
1002}
1003
1004void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
1005  // Nothing to do, this is driven by the code generator.
1006  UNUSED(load);
1007}
1008
1009void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
1010  LocationSummary* locations =
1011      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
1012  switch (store->InputAt(1)->GetType()) {
1013    case Primitive::kPrimBoolean:
1014    case Primitive::kPrimByte:
1015    case Primitive::kPrimChar:
1016    case Primitive::kPrimShort:
1017    case Primitive::kPrimInt:
1018    case Primitive::kPrimNot:
1019    case Primitive::kPrimFloat:
1020      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1021      break;
1022
1023    case Primitive::kPrimLong:
1024    case Primitive::kPrimDouble:
1025      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1026      break;
1027
1028    default:
1029      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
1030  }
1031}
1032
1033void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
1034  UNUSED(store);
1035}
1036
1037void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
1038  LocationSummary* locations =
1039      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1040  locations->SetOut(Location::ConstantLocation(constant));
1041}
1042
1043void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
1044  // Will be generated at use site.
1045  UNUSED(constant);
1046}
1047
1048void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
1049  LocationSummary* locations =
1050      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1051  locations->SetOut(Location::ConstantLocation(constant));
1052}
1053
1054void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
1055  // Will be generated at use site.
1056  UNUSED(constant);
1057}
1058
1059void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
1060  LocationSummary* locations =
1061      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1062  locations->SetOut(Location::ConstantLocation(constant));
1063}
1064
1065void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
1066  // Will be generated at use site.
1067  UNUSED(constant);
1068}
1069
1070void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
1071  LocationSummary* locations =
1072      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1073  locations->SetOut(Location::ConstantLocation(constant));
1074}
1075
1076void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
1077  // Will be generated at use site.
1078  UNUSED(constant);
1079}
1080
1081void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
1082  ret->SetLocations(nullptr);
1083}
1084
1085void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
1086  UNUSED(ret);
1087  codegen_->GenerateFrameExit();
1088}
1089
1090void LocationsBuilderARM::VisitReturn(HReturn* ret) {
1091  LocationSummary* locations =
1092      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1093  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
1094}
1095
1096void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
1097  UNUSED(ret);
1098  codegen_->GenerateFrameExit();
1099}
1100
1101void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1102  HandleInvoke(invoke);
1103}
1104
1105void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
1106  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
1107}
1108
1109void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
1110  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1111
1112  // TODO: Implement all kinds of calls:
1113  // 1) boot -> boot
1114  // 2) app -> boot
1115  // 3) app -> app
1116  //
1117  // Currently we implement the app -> app logic, which looks up in the resolve cache.
1118
1119  // temp = method;
1120  codegen_->LoadCurrentMethod(temp);
1121  // temp = temp->dex_cache_resolved_methods_;
1122  __ LoadFromOffset(
1123      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
1124  // temp = temp[index_in_cache]
1125  __ LoadFromOffset(
1126      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
1127  // LR = temp[offset_of_quick_compiled_code]
1128  __ LoadFromOffset(kLoadWord, LR, temp,
1129                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1130  // LR()
1131  __ blx(LR);
1132
1133  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1134  DCHECK(!codegen_->IsLeafMethod());
1135}
1136
1137void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1138  HandleInvoke(invoke);
1139}
1140
1141void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1142  LocationSummary* locations =
1143      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1144  locations->AddTemp(Location::RegisterLocation(R0));
1145
1146  InvokeDexCallingConventionVisitor calling_convention_visitor;
1147  for (size_t i = 0; i < invoke->InputCount(); i++) {
1148    HInstruction* input = invoke->InputAt(i);
1149    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1150  }
1151
1152  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1153}
1154
1155
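// Virtual dispatch goes through the vtable embedded in the receiver's Class
// object: temp <- receiver->klass_, temp <- the vtable slot at
// `method_offset`, then the call proceeds through the method's quick entry
// point.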
1156void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1157  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
1158  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1159          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1160  LocationSummary* locations = invoke->GetLocations();
1161  Location receiver = locations->InAt(0);
1162  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1163  // temp = object->GetClass();
1164  if (receiver.IsStackSlot()) {
1165    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
1166    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
1167  } else {
1168    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
1169  }
1170  // temp = temp->GetMethodAt(method_offset);
1171  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
1172  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
1173  // LR = temp->GetEntryPoint();
1174  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
1175  // LR();
1176  __ blx(LR);
1177  DCHECK(!codegen_->IsLeafMethod());
1178  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1179}
1180
1181void LocationsBuilderARM::VisitNeg(HNeg* neg) {
1182  LocationSummary* locations =
1183      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1184  switch (neg->GetResultType()) {
1185    case Primitive::kPrimInt:
1186    case Primitive::kPrimLong: {
1187      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
1188      locations->SetInAt(0, Location::RequiresRegister());
1189      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1190      break;
1191    }
1192
1193    case Primitive::kPrimFloat:
1194    case Primitive::kPrimDouble:
1195      locations->SetInAt(0, Location::RequiresFpuRegister());
1196      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1197      break;
1198
1199    default:
1200      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1201  }
1202}
1203
1204void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
1205  LocationSummary* locations = neg->GetLocations();
1206  Location out = locations->Out();
1207  Location in = locations->InAt(0);
1208  switch (neg->GetResultType()) {
1209    case Primitive::kPrimInt:
1210      DCHECK(in.IsRegister());
1211      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
1212      break;
1213
1214    case Primitive::kPrimLong:
1215      DCHECK(in.IsRegisterPair());
1216      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
1217      __ rsbs(out.AsRegisterPairLow<Register>(),
1218              in.AsRegisterPairLow<Register>(),
1219              ShifterOperand(0));
1220      // We cannot emit an RSC (Reverse Subtract with Carry)
1221      // instruction here, as it does not exist in the Thumb-2
1222      // instruction set.  We use the following approach
1223      // using SBC and SUB instead.
1224      //
      // out.hi = -(1 - C), i.e. minus the borrow from the low-word
      // subtraction above (SBC computes Rn - Op2 - NOT(C)).
1226      __ sbc(out.AsRegisterPairHigh<Register>(),
1227             out.AsRegisterPairHigh<Register>(),
1228             ShifterOperand(out.AsRegisterPairHigh<Register>()));
1229      // out.hi = out.hi - in.hi
1230      __ sub(out.AsRegisterPairHigh<Register>(),
1231             out.AsRegisterPairHigh<Register>(),
1232             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1233      break;
1234
1235    case Primitive::kPrimFloat:
1236      DCHECK(in.IsFpuRegister());
1237      __ vnegs(out.As<SRegister>(), in.As<SRegister>());
1238      break;
1239
1240    case Primitive::kPrimDouble:
1241      DCHECK(in.IsFpuRegisterPair());
1242      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1243               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
1244      break;
1245
1246    default:
1247      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1248  }
1249}
1250
1251void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
1252  LocationSummary* locations =
1253      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
1254  Primitive::Type result_type = conversion->GetResultType();
1255  Primitive::Type input_type = conversion->GetInputType();
1256  switch (result_type) {
1257    case Primitive::kPrimLong:
1258      switch (input_type) {
1259        case Primitive::kPrimByte:
1260        case Primitive::kPrimShort:
1261        case Primitive::kPrimInt:
1262          // int-to-long conversion.
1263          locations->SetInAt(0, Location::RequiresRegister());
1264          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1265          break;
1266
1267        case Primitive::kPrimFloat:
1268        case Primitive::kPrimDouble:
1269          LOG(FATAL) << "Type conversion from " << input_type << " to "
1270                     << result_type << " not yet implemented";
1271          break;
1272
1273        default:
1274          LOG(FATAL) << "Unexpected type conversion from " << input_type
1275                     << " to " << result_type;
1276      }
1277      break;
1278
1279    case Primitive::kPrimInt:
1280    case Primitive::kPrimFloat:
1281    case Primitive::kPrimDouble:
1282      LOG(FATAL) << "Type conversion from " << input_type
1283                 << " to " << result_type << " not yet implemented";
1284      break;
1285
1286    default:
1287      LOG(FATAL) << "Unexpected type conversion from " << input_type
1288                 << " to " << result_type;
1289  }
1290}
1291
1292void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
1293  LocationSummary* locations = conversion->GetLocations();
1294  Location out = locations->Out();
1295  Location in = locations->InAt(0);
1296  Primitive::Type result_type = conversion->GetResultType();
1297  Primitive::Type input_type = conversion->GetInputType();
1298  switch (result_type) {
1299    case Primitive::kPrimLong:
1300      switch (input_type) {
1301        case Primitive::kPrimByte:
1302        case Primitive::kPrimShort:
1303        case Primitive::kPrimInt:
1304          // int-to-long conversion.
1305          DCHECK(out.IsRegisterPair());
1306          DCHECK(in.IsRegister());
1307          __ Mov(out.AsRegisterPairLow<Register>(), in.As<Register>());
1308          // Sign extension.
1309          __ Asr(out.AsRegisterPairHigh<Register>(),
1310                 out.AsRegisterPairLow<Register>(),
1311                 31);
1312          break;
1313
1314        case Primitive::kPrimFloat:
1315        case Primitive::kPrimDouble:
1316          LOG(FATAL) << "Type conversion from " << input_type << " to "
1317                     << result_type << " not yet implemented";
1318          break;
1319
1320        default:
1321          LOG(FATAL) << "Unexpected type conversion from " << input_type
1322                     << " to " << result_type;
1323      }
1324      break;
1325
1326    case Primitive::kPrimInt:
1327    case Primitive::kPrimFloat:
1328    case Primitive::kPrimDouble:
1329      LOG(FATAL) << "Type conversion from " << input_type
1330                 << " to " << result_type << " not yet implemented";
1331      break;
1332
1333    default:
1334      LOG(FATAL) << "Unexpected type conversion from " << input_type
1335                 << " to " << result_type;
1336  }
1337}
1338
1339void LocationsBuilderARM::VisitAdd(HAdd* add) {
1340  LocationSummary* locations =
1341      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1342  switch (add->GetResultType()) {
1343    case Primitive::kPrimInt:
1344    case Primitive::kPrimLong: {
1345      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1346      locations->SetInAt(0, Location::RequiresRegister());
1347      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1348      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1349      break;
1350    }
1351
1352    case Primitive::kPrimFloat:
1353    case Primitive::kPrimDouble: {
1354      locations->SetInAt(0, Location::RequiresFpuRegister());
1355      locations->SetInAt(1, Location::RequiresFpuRegister());
1356      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1357      break;
1358    }
1359
1360    default:
1361      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1362  }
1363}
1364
1365void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
1366  LocationSummary* locations = add->GetLocations();
1367  Location out = locations->Out();
1368  Location first = locations->InAt(0);
1369  Location second = locations->InAt(1);
1370  switch (add->GetResultType()) {
1371    case Primitive::kPrimInt:
1372      if (second.IsRegister()) {
1373        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1374      } else {
1375        __ AddConstant(out.As<Register>(),
1376                       first.As<Register>(),
1377                       second.GetConstant()->AsIntConstant()->GetValue());
1378      }
1379      break;
1380
1381    case Primitive::kPrimLong:
1382      __ adds(out.AsRegisterPairLow<Register>(),
1383              first.AsRegisterPairLow<Register>(),
1384              ShifterOperand(second.AsRegisterPairLow<Register>()));
1385      __ adc(out.AsRegisterPairHigh<Register>(),
1386             first.AsRegisterPairHigh<Register>(),
1387             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1388      break;
1389
1390    case Primitive::kPrimFloat:
1391      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1392      break;
1393
1394    case Primitive::kPrimDouble:
1395      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1396               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1397               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1398      break;
1399
1400    default:
1401      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1402  }
1403}
1404
1405void LocationsBuilderARM::VisitSub(HSub* sub) {
1406  LocationSummary* locations =
1407      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1408  switch (sub->GetResultType()) {
1409    case Primitive::kPrimInt:
1410    case Primitive::kPrimLong: {
1411      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1412      locations->SetInAt(0, Location::RequiresRegister());
1413      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1414      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1415      break;
1416    }
1417    case Primitive::kPrimFloat:
1418    case Primitive::kPrimDouble: {
1419      locations->SetInAt(0, Location::RequiresFpuRegister());
1420      locations->SetInAt(1, Location::RequiresFpuRegister());
1421      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1422      break;
1423    }
1424    default:
1425      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1426  }
1427}
1428
1429void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
1430  LocationSummary* locations = sub->GetLocations();
1431  Location out = locations->Out();
1432  Location first = locations->InAt(0);
1433  Location second = locations->InAt(1);
1434  switch (sub->GetResultType()) {
1435    case Primitive::kPrimInt: {
1436      if (second.IsRegister()) {
1437        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
1438      } else {
1439        __ AddConstant(out.As<Register>(),
1440                       first.As<Register>(),
1441                       -second.GetConstant()->AsIntConstant()->GetValue());
1442      }
1443      break;
1444    }
1445
1446    case Primitive::kPrimLong: {
1447      __ subs(out.AsRegisterPairLow<Register>(),
1448              first.AsRegisterPairLow<Register>(),
1449              ShifterOperand(second.AsRegisterPairLow<Register>()));
1450      __ sbc(out.AsRegisterPairHigh<Register>(),
1451             first.AsRegisterPairHigh<Register>(),
1452             ShifterOperand(second.AsRegisterPairHigh<Register>()));
1453      break;
1454    }
1455
1456    case Primitive::kPrimFloat: {
1457      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1458      break;
1459    }
1460
1461    case Primitive::kPrimDouble: {
1462      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1463               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1464               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1465      break;
1466    }
1467
1469    default:
1470      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1471  }
1472}
1473
1474void LocationsBuilderARM::VisitMul(HMul* mul) {
1475  LocationSummary* locations =
1476      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1477  switch (mul->GetResultType()) {
1478    case Primitive::kPrimInt:
1479    case Primitive::kPrimLong:  {
1480      locations->SetInAt(0, Location::RequiresRegister());
1481      locations->SetInAt(1, Location::RequiresRegister());
1482      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1483      break;
1484    }
1485
1486    case Primitive::kPrimFloat:
1487    case Primitive::kPrimDouble: {
1488      locations->SetInAt(0, Location::RequiresFpuRegister());
1489      locations->SetInAt(1, Location::RequiresFpuRegister());
1490      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1491      break;
1492    }
1493
1494    default:
1495      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1496  }
1497}
1498
1499void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
1500  LocationSummary* locations = mul->GetLocations();
1501  Location out = locations->Out();
1502  Location first = locations->InAt(0);
1503  Location second = locations->InAt(1);
1504  switch (mul->GetResultType()) {
1505    case Primitive::kPrimInt: {
1506      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
1507      break;
1508    }
1509    case Primitive::kPrimLong: {
1510      Register out_hi = out.AsRegisterPairHigh<Register>();
1511      Register out_lo = out.AsRegisterPairLow<Register>();
1512      Register in1_hi = first.AsRegisterPairHigh<Register>();
1513      Register in1_lo = first.AsRegisterPairLow<Register>();
1514      Register in2_hi = second.AsRegisterPairHigh<Register>();
1515      Register in2_lo = second.AsRegisterPairLow<Register>();
1516
1517      // Extra checks to protect against clobbering caused by the existence of the R1_R2 pair.
1518      // The algorithm is wrong if out.hi is either in1.lo or in2.lo
1519      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
1520      DCHECK_NE(out_hi, in1_lo);
1521      DCHECK_NE(out_hi, in2_lo);
1522
1523      // input: in1 - 64 bits, in2 - 64 bits
1524      // output: out
1525      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo) * 2^32 + in1.lo * in2.lo
1526      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1527      // parts: out.lo = (in1.lo * in2.lo)[31:0]
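          // This is the schoolbook expansion of
          // (in1.hi * 2^32 + in1.lo) * (in2.hi * 2^32 + in2.lo) truncated to 64 bits:
          // the in1.hi * in2.hi term is shifted out entirely, and the two cross terms
          // only contribute to the high word.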
1528
1529      // IP <- in1.lo * in2.hi
1530      __ mul(IP, in1_lo, in2_hi);
1531      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
1532      __ mla(out_hi, in1_hi, in2_lo, IP);
1533      // out.lo <- (in1.lo * in2.lo)[31:0];
1534      __ umull(out_lo, IP, in1_lo, in2_lo);
1535      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1536      __ add(out_hi, out_hi, ShifterOperand(IP));
1537      break;
1538    }
1539
1540    case Primitive::kPrimFloat: {
1541      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1542      break;
1543    }
1544
1545    case Primitive::kPrimDouble: {
1546      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1547               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1548               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1549      break;
1550    }
1551
1552    default:
1553      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1554  }
1555}
1556
1557void LocationsBuilderARM::VisitDiv(HDiv* div) {
1558  LocationSummary* locations =
1559      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1560  switch (div->GetResultType()) {
1561    case Primitive::kPrimInt: {
1562      locations->SetInAt(0, Location::RequiresRegister());
1563      locations->SetInAt(1, Location::RequiresRegister());
1564      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1565      break;
1566    }
1567    case Primitive::kPrimLong: {
1568      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1569      break;
1570    }
1571    case Primitive::kPrimFloat:
1572    case Primitive::kPrimDouble: {
1573      locations->SetInAt(0, Location::RequiresFpuRegister());
1574      locations->SetInAt(1, Location::RequiresFpuRegister());
1575      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1576      break;
1577    }
1578
1579    default:
1580      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1581  }
1582}
1583
1584void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1585  LocationSummary* locations = div->GetLocations();
1586  Location out = locations->Out();
1587  Location first = locations->InAt(0);
1588  Location second = locations->InAt(1);
1589
1590  switch (div->GetResultType()) {
1591    case Primitive::kPrimInt: {
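          // Note: `sdiv` is only present on cores that implement the (optional)
          // integer divide extension.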
1592      __ sdiv(out.As<Register>(), first.As<Register>(), second.As<Register>());
1593      break;
1594    }
1595
1596    case Primitive::kPrimLong: {
1597      LOG(FATAL) << "Not implemented div type " << div->GetResultType();
1598      break;
1599    }
1600
1601    case Primitive::kPrimFloat: {
1602      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1603      break;
1604    }
1605
1606    case Primitive::kPrimDouble: {
1607      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1608               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1609               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1610      break;
1611    }
1612
1613    default:
1614      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1615  }
1616}
1617
1618void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1619  LocationSummary* locations =
1620      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1621  locations->SetInAt(0, Location::RequiresRegister());
1622  if (instruction->HasUses()) {
1623    locations->SetOut(Location::SameAsFirstInput());
1624  }
1625}
1626
1627void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1628  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
1629  codegen_->AddSlowPath(slow_path);
1630
1631  LocationSummary* locations = instruction->GetLocations();
1632  Location value = locations->InAt(0);
1633
1634  DCHECK(value.IsRegister()) << value;
1635  __ cmp(value.As<Register>(), ShifterOperand(0));
1636  __ b(slow_path->GetEntryLabel(), EQ);
1637}
1638
1639void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1640  LocationSummary* locations =
1641      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1642  InvokeRuntimeCallingConvention calling_convention;
1643  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1644  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1645  locations->SetOut(Location::RegisterLocation(R0));
1646}
1647
1648void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
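      // The allocation entry point expects the type index in the first argument
      // register and the referring ArtMethod* in the second.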
1649  InvokeRuntimeCallingConvention calling_convention;
1650  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1651  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1652  codegen_->InvokeRuntime(
1653      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
1654}
1655
1656void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1657  LocationSummary* locations =
1658      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1659  InvokeRuntimeCallingConvention calling_convention;
1660  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1661  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1662  locations->SetOut(Location::RegisterLocation(R0));
1663  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1664}
1665
1666void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
1667  InvokeRuntimeCallingConvention calling_convention;
1668  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
1669  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
1670  codegen_->InvokeRuntime(
1671      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
1672}
1673
1674void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1675  LocationSummary* locations =
1676      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1677  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1678  if (location.IsStackSlot()) {
1679    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1680  } else if (location.IsDoubleStackSlot()) {
1681    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1682  }
1683  locations->SetOut(location);
1684}
1685
1686void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
1687  // Nothing to do, the parameter is already at its location.
1688  UNUSED(instruction);
1689}
1690
1691void LocationsBuilderARM::VisitNot(HNot* not_) {
1692  LocationSummary* locations =
1693      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1694  locations->SetInAt(0, Location::RequiresRegister());
1695  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1696}
1697
1698void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
1699  LocationSummary* locations = not_->GetLocations();
1700  Location out = locations->Out();
1701  Location in = locations->InAt(0);
1702  switch (not_->InputAt(0)->GetType()) {
1703    case Primitive::kPrimBoolean:
1704      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
1705      break;
1706
1707    case Primitive::kPrimInt:
1708      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
1709      break;
1710
1711    case Primitive::kPrimLong:
1712      __ mvn(out.AsRegisterPairLow<Register>(),
1713             ShifterOperand(in.AsRegisterPairLow<Register>()));
1714      __ mvn(out.AsRegisterPairHigh<Register>(),
1715             ShifterOperand(in.AsRegisterPairHigh<Register>()));
1716      break;
1717
1718    default:
1719      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
1720  }
1721}
1722
1723void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1724  LocationSummary* locations =
1725      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1726  locations->SetInAt(0, Location::RequiresRegister());
1727  locations->SetInAt(1, Location::RequiresRegister());
1728  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1729}
1730
1731void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1732  LocationSummary* locations = compare->GetLocations();
1733  switch (compare->InputAt(0)->GetType()) {
1734    case Primitive::kPrimLong: {
1735      Register output = locations->Out().As<Register>();
1736      Location left = locations->InAt(0);
1737      Location right = locations->InAt(1);
1738      Label less, greater, done;
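          // Compare the high words first (signed, since they carry the sign of the
          // 64-bit values); only when they are equal does the result depend on the
          // low words, which are compared as unsigned magnitudes.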
1739      __ cmp(left.AsRegisterPairHigh<Register>(),
1740             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1741      __ b(&less, LT);
1742      __ b(&greater, GT);
1743      // Do the LoadImmediate before the low-word `cmp` below, as LoadImmediate
1744      // might clobber the status flags that the following branches depend on.
1745      __ LoadImmediate(output, 0);
1746      __ cmp(left.AsRegisterPairLow<Register>(),
1747             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
1748      __ b(&done, EQ);
1749      __ b(&less, CC);
1750
1751      __ Bind(&greater);
1752      __ LoadImmediate(output, 1);
1753      __ b(&done);
1754
1755      __ Bind(&less);
1756      __ LoadImmediate(output, -1);
1757
1758      __ Bind(&done);
1759      break;
1760    }
1761    default:
1762      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1763  }
1764}
1765
1766void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1767  LocationSummary* locations =
1768      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1769  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1770    locations->SetInAt(i, Location::Any());
1771  }
1772  locations->SetOut(Location::Any());
1773}
1774
1775void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
1776  UNUSED(instruction);
1777  LOG(FATAL) << "Unreachable";
1778}
1779
1780void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1781  LocationSummary* locations =
1782      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1783  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
1784  locations->SetInAt(0, Location::RequiresRegister());
1785  locations->SetInAt(1, Location::RequiresRegister());
1786  // Temporary registers for the write barrier.
1787  if (is_object_type) {
1788    locations->AddTemp(Location::RequiresRegister());
1789    locations->AddTemp(Location::RequiresRegister());
1790  }
1791}
1792
1793void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1794  LocationSummary* locations = instruction->GetLocations();
1795  Register obj = locations->InAt(0).As<Register>();
1796  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1797  Primitive::Type field_type = instruction->GetFieldType();
1798
1799  switch (field_type) {
1800    case Primitive::kPrimBoolean:
1801    case Primitive::kPrimByte: {
1802      Register value = locations->InAt(1).As<Register>();
1803      __ StoreToOffset(kStoreByte, value, obj, offset);
1804      break;
1805    }
1806
1807    case Primitive::kPrimShort:
1808    case Primitive::kPrimChar: {
1809      Register value = locations->InAt(1).As<Register>();
1810      __ StoreToOffset(kStoreHalfword, value, obj, offset);
1811      break;
1812    }
1813
1814    case Primitive::kPrimInt:
1815    case Primitive::kPrimNot: {
1816      Register value = locations->InAt(1).As<Register>();
1817      __ StoreToOffset(kStoreWord, value, obj, offset);
1818      if (field_type == Primitive::kPrimNot) {
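            // Storing a reference requires a write barrier: mark the GC card covering `obj`.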
1819        Register temp = locations->GetTemp(0).As<Register>();
1820        Register card = locations->GetTemp(1).As<Register>();
1821        codegen_->MarkGCCard(temp, card, obj, value);
1822      }
1823      break;
1824    }
1825
1826    case Primitive::kPrimLong: {
1827      Location value = locations->InAt(1);
1828      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
1829      break;
1830    }
1831
1832    case Primitive::kPrimFloat: {
1833      SRegister value = locations->InAt(1).As<SRegister>();
1834      __ StoreSToOffset(value, obj, offset);
1835      break;
1836    }
1837
1838    case Primitive::kPrimDouble: {
1839      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
1840      __ StoreDToOffset(value, obj, offset);
1841      break;
1842    }
1843
1844    case Primitive::kPrimVoid:
1845      LOG(FATAL) << "Unreachable type " << field_type;
1846      UNREACHABLE();
1847  }
1848}
1849
1850void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1851  LocationSummary* locations =
1852      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1853  locations->SetInAt(0, Location::RequiresRegister());
1854  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1855}
1856
1857void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1858  LocationSummary* locations = instruction->GetLocations();
1859  Register obj = locations->InAt(0).As<Register>();
1860  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1861
1862  switch (instruction->GetType()) {
1863    case Primitive::kPrimBoolean: {
1864      Register out = locations->Out().As<Register>();
1865      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
1866      break;
1867    }
1868
1869    case Primitive::kPrimByte: {
1870      Register out = locations->Out().As<Register>();
1871      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
1872      break;
1873    }
1874
1875    case Primitive::kPrimShort: {
1876      Register out = locations->Out().As<Register>();
1877      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
1878      break;
1879    }
1880
1881    case Primitive::kPrimChar: {
1882      Register out = locations->Out().As<Register>();
1883      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
1884      break;
1885    }
1886
1887    case Primitive::kPrimInt:
1888    case Primitive::kPrimNot: {
1889      Register out = locations->Out().As<Register>();
1890      __ LoadFromOffset(kLoadWord, out, obj, offset);
1891      break;
1892    }
1893
1894    case Primitive::kPrimLong: {
1895      // TODO: support volatile.
1896      Location out = locations->Out();
1897      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
1898      break;
1899    }
1900
1901    case Primitive::kPrimFloat: {
1902      SRegister out = locations->Out().As<SRegister>();
1903      __ LoadSFromOffset(out, obj, offset);
1904      break;
1905    }
1906
1907    case Primitive::kPrimDouble: {
1908      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
1909      __ LoadDFromOffset(out, obj, offset);
1910      break;
1911    }
1912
1913    case Primitive::kPrimVoid:
1914      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1915      UNREACHABLE();
1916  }
1917}
1918
1919void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
1920  LocationSummary* locations =
1921      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1922  locations->SetInAt(0, Location::RequiresRegister());
1923  if (instruction->HasUses()) {
1924    locations->SetOut(Location::SameAsFirstInput());
1925  }
1926}
1927
1928void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
1929  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
1930  codegen_->AddSlowPath(slow_path);
1931
1932  LocationSummary* locations = instruction->GetLocations();
1933  Location obj = locations->InAt(0);
1934
1935  if (obj.IsRegister()) {
1936    __ cmp(obj.As<Register>(), ShifterOperand(0));
1937    __ b(slow_path->GetEntryLabel(), EQ);
1938  } else {
1939    DCHECK(obj.IsConstant()) << obj;
1940    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
1941    __ b(slow_path->GetEntryLabel());
1942  }
1943}
1944
1945void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
1946  LocationSummary* locations =
1947      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1948  locations->SetInAt(0, Location::RequiresRegister());
1949  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1950  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1951}
1952
1953void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
1954  LocationSummary* locations = instruction->GetLocations();
1955  Register obj = locations->InAt(0).As<Register>();
1956  Location index = locations->InAt(1);
1957
1958  switch (instruction->GetType()) {
1959    case Primitive::kPrimBoolean: {
1960      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1961      Register out = locations->Out().As<Register>();
1962      if (index.IsConstant()) {
1963        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1964        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
1965      } else {
1966        __ add(IP, obj, ShifterOperand(index.As<Register>()));
1967        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
1968      }
1969      break;
1970    }
1971
1972    case Primitive::kPrimByte: {
1973      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
1974      Register out = locations->Out().As<Register>();
1975      if (index.IsConstant()) {
1976        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1977        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
1978      } else {
1979        __ add(IP, obj, ShifterOperand(index.As<Register>()));
1980        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
1981      }
1982      break;
1983    }
1984
1985    case Primitive::kPrimShort: {
1986      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
1987      Register out = locations->Out().As<Register>();
1988      if (index.IsConstant()) {
1989        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1990        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
1991      } else {
1992        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
1993        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
1994      }
1995      break;
1996    }
1997
1998    case Primitive::kPrimChar: {
1999      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2000      Register out = locations->Out().As<Register>();
2001      if (index.IsConstant()) {
2002        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2003        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
2004      } else {
2005        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2006        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
2007      }
2008      break;
2009    }
2010
2011    case Primitive::kPrimInt:
2012    case Primitive::kPrimNot: {
2013      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
2014      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2015      Register out = locations->Out().As<Register>();
2016      if (index.IsConstant()) {
2017        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2018        __ LoadFromOffset(kLoadWord, out, obj, offset);
2019      } else {
2020        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2021        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
2022      }
2023      break;
2024    }
2025
2026    case Primitive::kPrimLong: {
2027      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2028      Location out = locations->Out();
2029      if (index.IsConstant()) {
2030        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2031        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
2032      } else {
2033        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2034        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
2035      }
2036      break;
2037    }
2038
2039    case Primitive::kPrimFloat:
2040    case Primitive::kPrimDouble:
2041      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2042      UNREACHABLE();
2043    case Primitive::kPrimVoid:
2044      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2045      UNREACHABLE();
2046  }
2047}
2048
2049void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
2050  Primitive::Type value_type = instruction->GetComponentType();
2051  bool is_object = value_type == Primitive::kPrimNot;
2052  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2053      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
2054  if (is_object) {
2055    InvokeRuntimeCallingConvention calling_convention;
2056    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2057    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2058    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2059  } else {
2060    locations->SetInAt(0, Location::RequiresRegister());
2061    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2062    locations->SetInAt(2, Location::RequiresRegister());
2063  }
2064}
2065
2066void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
2067  LocationSummary* locations = instruction->GetLocations();
2068  Register obj = locations->InAt(0).As<Register>();
2069  Location index = locations->InAt(1);
2070  Primitive::Type value_type = instruction->GetComponentType();
2071
2072  switch (value_type) {
2073    case Primitive::kPrimBoolean:
2074    case Primitive::kPrimByte: {
2075      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2076      Register value = locations->InAt(2).As<Register>();
2077      if (index.IsConstant()) {
2078        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2079        __ StoreToOffset(kStoreByte, value, obj, offset);
2080      } else {
2081        __ add(IP, obj, ShifterOperand(index.As<Register>()));
2082        __ StoreToOffset(kStoreByte, value, IP, data_offset);
2083      }
2084      break;
2085    }
2086
2087    case Primitive::kPrimShort:
2088    case Primitive::kPrimChar: {
2089      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2090      Register value = locations->InAt(2).As<Register>();
2091      if (index.IsConstant()) {
2092        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2093        __ StoreToOffset(kStoreHalfword, value, obj, offset);
2094      } else {
2095        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
2096        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
2097      }
2098      break;
2099    }
2100
2101    case Primitive::kPrimInt: {
2102      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2103      Register value = locations->InAt(2).As<Register>();
2104      if (index.IsConstant()) {
2105        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2106        __ StoreToOffset(kStoreWord, value, obj, offset);
2107      } else {
2108        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
2109        __ StoreToOffset(kStoreWord, value, IP, data_offset);
2110      }
2111      break;
2112    }
2113
2114    case Primitive::kPrimNot: {
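          // Reference stores go through the runtime, which performs the array store
          // type check and the GC write barrier.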
2115      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
2116      break;
2117    }
2118
2119    case Primitive::kPrimLong: {
2120      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2121      Location value = locations->InAt(2);
2122      if (index.IsConstant()) {
2123        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2124        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
2125      } else {
2126        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
2127        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
2128      }
2129      break;
2130    }
2131
2132    case Primitive::kPrimFloat:
2133    case Primitive::kPrimDouble:
2134      LOG(FATAL) << "Unimplemented register type " << value_type;
2135      UNREACHABLE();
2136    case Primitive::kPrimVoid:
2137      LOG(FATAL) << "Unreachable type " << value_type;
2138      UNREACHABLE();
2139  }
2140}
2141
2142void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
2143  LocationSummary* locations =
2144      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2145  locations->SetInAt(0, Location::RequiresRegister());
2146  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2147}
2148
2149void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
2150  LocationSummary* locations = instruction->GetLocations();
2151  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2152  Register obj = locations->InAt(0).As<Register>();
2153  Register out = locations->Out().As<Register>();
2154  __ LoadFromOffset(kLoadWord, out, obj, offset);
2155}
2156
2157void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2158  LocationSummary* locations =
2159      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2160  locations->SetInAt(0, Location::RequiresRegister());
2161  locations->SetInAt(1, Location::RequiresRegister());
2162  if (instruction->HasUses()) {
2163    locations->SetOut(Location::SameAsFirstInput());
2164  }
2165}
2166
2167void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
2168  LocationSummary* locations = instruction->GetLocations();
2169  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
2170      instruction, locations->InAt(0), locations->InAt(1));
2171  codegen_->AddSlowPath(slow_path);
2172
2173  Register index = locations->InAt(0).As<Register>();
2174  Register length = locations->InAt(1).As<Register>();
2175
2176  __ cmp(index, ShifterOperand(length));
2177  __ b(slow_path->GetEntryLabel(), CS);
2178}
2179
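    // Marks the GC card covering `object` after a reference `value` has been stored into it.
    // The card table base loaded into `card` is biased by the runtime so that its least
    // significant byte equals the dirty card value; storing that byte at
    // (card + (object >> kCardShift)) therefore marks the card dirty. Null stores need no
    // barrier and skip the marking.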
2180void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
2181  Label is_null;
2182  __ CompareAndBranchIfZero(value, &is_null);
2183  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
2184  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
2185  __ strb(card, Address(card, temp));
2186  __ Bind(&is_null);
2187}
2188
2189void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
2190  temp->SetLocations(nullptr);
2191}
2192
2193void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
2194  // Nothing to do, this is driven by the code generator.
2195  UNUSED(temp);
2196}
2197
2198void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
2199  UNUSED(instruction);
2200  LOG(FATAL) << "Unreachable";
2201}
2202
2203void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
2204  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2205}
2206
2207void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2208  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2209}
2210
2211void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
2212  HBasicBlock* block = instruction->GetBlock();
2213  if (block->GetLoopInformation() != nullptr) {
2214    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2215    // The back edge will generate the suspend check.
2216    return;
2217  }
2218  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2219    // The goto will generate the suspend check.
2220    return;
2221  }
2222  GenerateSuspendCheck(instruction, nullptr);
2223}
2224
2225void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
2226                                                       HBasicBlock* successor) {
2227  SuspendCheckSlowPathARM* slow_path =
2228      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
2229  codegen_->AddSlowPath(slow_path);
2230
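      // Load the 16-bit thread flags; any non-zero value (e.g. a pending suspend or
      // checkpoint request) diverts execution to the slow path.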
2231  __ LoadFromOffset(
2232      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
2233  __ cmp(IP, ShifterOperand(0));
2234  // TODO: Figure out the branch offsets and use cbz/cbnz.
2235  if (successor == nullptr) {
2236    __ b(slow_path->GetEntryLabel(), NE);
2237    __ Bind(slow_path->GetReturnLabel());
2238  } else {
2239    __ b(codegen_->GetLabelOf(successor), EQ);
2240    __ b(slow_path->GetEntryLabel());
2241  }
2242}
2243
2244ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
2245  return codegen_->GetAssembler();
2246}
2247
2248void ParallelMoveResolverARM::EmitMove(size_t index) {
2249  MoveOperands* move = moves_.Get(index);
2250  Location source = move->GetSource();
2251  Location destination = move->GetDestination();
2252
2253  if (source.IsRegister()) {
2254    if (destination.IsRegister()) {
2255      __ Mov(destination.As<Register>(), source.As<Register>());
2256    } else {
2257      DCHECK(destination.IsStackSlot());
2258      __ StoreToOffset(kStoreWord, source.As<Register>(),
2259                       SP, destination.GetStackIndex());
2260    }
2261  } else if (source.IsStackSlot()) {
2262    if (destination.IsRegister()) {
2263      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
2264                        SP, source.GetStackIndex());
2265    } else {
2266      DCHECK(destination.IsStackSlot());
2267      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
2268      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2269    }
2270  } else {
2271    DCHECK(source.IsConstant());
2272    DCHECK(source.GetConstant()->IsIntConstant());
2273    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
2274    if (destination.IsRegister()) {
2275      __ LoadImmediate(destination.As<Register>(), value);
2276    } else {
2277      DCHECK(destination.IsStackSlot());
2278      __ LoadImmediate(IP, value);
2279      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
2280    }
2281  }
2282}
2283
2284void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
2285  __ Mov(IP, reg);
2286  __ LoadFromOffset(kLoadWord, reg, SP, mem);
2287  __ StoreToOffset(kStoreWord, IP, SP, mem);
2288}
2289
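    // Swaps two stack slots using IP plus one additional scratch core register. If that
    // scratch register had to be spilled, the push moved SP down by one word, so the
    // slot offsets (which are SP-relative) are rebased accordingly.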
2290void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
2291  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
2292  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
2293  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
2294                    SP, mem1 + stack_offset);
2295  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
2296  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
2297                   SP, mem2 + stack_offset);
2298  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
2299}
2300
2301void ParallelMoveResolverARM::EmitSwap(size_t index) {
2302  MoveOperands* move = moves_.Get(index);
2303  Location source = move->GetSource();
2304  Location destination = move->GetDestination();
2305
2306  if (source.IsRegister() && destination.IsRegister()) {
2307    DCHECK_NE(source.As<Register>(), IP);
2308    DCHECK_NE(destination.As<Register>(), IP);
2309    __ Mov(IP, source.As<Register>());
2310    __ Mov(source.As<Register>(), destination.As<Register>());
2311    __ Mov(destination.As<Register>(), IP);
2312  } else if (source.IsRegister() && destination.IsStackSlot()) {
2313    Exchange(source.As<Register>(), destination.GetStackIndex());
2314  } else if (source.IsStackSlot() && destination.IsRegister()) {
2315    Exchange(destination.As<Register>(), source.GetStackIndex());
2316  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2317    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2318  } else {
2319    LOG(FATAL) << "Unimplemented";
2320  }
2321}
2322
2323void ParallelMoveResolverARM::SpillScratch(int reg) {
2324  __ Push(static_cast<Register>(reg));
2325}
2326
2327void ParallelMoveResolverARM::RestoreScratch(int reg) {
2328  __ Pop(static_cast<Register>(reg));
2329}
2330
2331void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
2332  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2333      ? LocationSummary::kCallOnSlowPath
2334      : LocationSummary::kNoCall;
2335  LocationSummary* locations =
2336      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2337  locations->SetOut(Location::RequiresRegister());
2338}
2339
2340void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
2341  Register out = cls->GetLocations()->Out().As<Register>();
2342  if (cls->IsReferrersClass()) {
2343    DCHECK(!cls->CanCallRuntime());
2344    DCHECK(!cls->MustGenerateClinitCheck());
2345    codegen_->LoadCurrentMethod(out);
2346    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
2347  } else {
2348    DCHECK(cls->CanCallRuntime());
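        // Walk ArtMethod* -> dex cache resolved types -> the requested Class. A null entry
        // means the type has not been resolved yet and must be loaded through the runtime.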
2349    codegen_->LoadCurrentMethod(out);
2350    __ LoadFromOffset(
2351        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
2352    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
2353
2354    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2355        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2356    codegen_->AddSlowPath(slow_path);
2357    __ cmp(out, ShifterOperand(0));
2358    __ b(slow_path->GetEntryLabel(), EQ);
2359    if (cls->MustGenerateClinitCheck()) {
2360      GenerateClassInitializationCheck(slow_path, out);
2361    } else {
2362      __ Bind(slow_path->GetExitLabel());
2363    }
2364  }
2365}
2366
2367void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
2368  LocationSummary* locations =
2369      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2370  locations->SetInAt(0, Location::RequiresRegister());
2371  if (check->HasUses()) {
2372    locations->SetOut(Location::SameAsFirstInput());
2373  }
2374}
2375
2376void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
2377  // We assume the class is not null.
2378  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
2379      check->GetLoadClass(), check, check->GetDexPc(), true);
2380  codegen_->AddSlowPath(slow_path);
2381  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2382}
2383
2384void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
2385    SlowPathCodeARM* slow_path, Register class_reg) {
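      // A status below kStatusInitialized (including the negative error states) means
      // initialization has not completed successfully, so take the slow path.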
2386  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
2387  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
2388  __ b(slow_path->GetEntryLabel(), LT);
2389  // Even if the status reads as initialized, issue a memory barrier so that subsequent loads
2390  // from the class are not reordered before the status check (acquire semantics).
2391  __ dmb(ISH);
2392  __ Bind(slow_path->GetExitLabel());
2393}
2394
2395void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2396  LocationSummary* locations =
2397      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2398  locations->SetInAt(0, Location::RequiresRegister());
2399  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2400}
2401
2402void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2403  LocationSummary* locations = instruction->GetLocations();
2404  Register cls = locations->InAt(0).As<Register>();
2405  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2406
2407  switch (instruction->GetType()) {
2408    case Primitive::kPrimBoolean: {
2409      Register out = locations->Out().As<Register>();
2410      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
2411      break;
2412    }
2413
2414    case Primitive::kPrimByte: {
2415      Register out = locations->Out().As<Register>();
2416      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
2417      break;
2418    }
2419
2420    case Primitive::kPrimShort: {
2421      Register out = locations->Out().As<Register>();
2422      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
2423      break;
2424    }
2425
2426    case Primitive::kPrimChar: {
2427      Register out = locations->Out().As<Register>();
2428      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
2429      break;
2430    }
2431
2432    case Primitive::kPrimInt:
2433    case Primitive::kPrimNot: {
2434      Register out = locations->Out().As<Register>();
2435      __ LoadFromOffset(kLoadWord, out, cls, offset);
2436      break;
2437    }
2438
2439    case Primitive::kPrimLong: {
2440      // TODO: support volatile.
2441      Location out = locations->Out();
2442      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
2443      break;
2444    }
2445
2446    case Primitive::kPrimFloat: {
2447      SRegister out = locations->Out().As<SRegister>();
2448      __ LoadSFromOffset(out, cls, offset);
2449      break;
2450    }
2451
2452    case Primitive::kPrimDouble: {
2453      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
2454      __ LoadDFromOffset(out, cls, offset);
2455      break;
2456    }
2457
2458    case Primitive::kPrimVoid:
2459      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2460      UNREACHABLE();
2461  }
2462}
2463
2464void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2465  LocationSummary* locations =
2466      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2467  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
2468  locations->SetInAt(0, Location::RequiresRegister());
2469  locations->SetInAt(1, Location::RequiresRegister());
2470  // Temporary registers for the write barrier.
2471  if (is_object_type) {
2472    locations->AddTemp(Location::RequiresRegister());
2473    locations->AddTemp(Location::RequiresRegister());
2474  }
2475}
2476
2477void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2478  LocationSummary* locations = instruction->GetLocations();
2479  Register cls = locations->InAt(0).As<Register>();
2480  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2481  Primitive::Type field_type = instruction->GetFieldType();
2482
2483  switch (field_type) {
2484    case Primitive::kPrimBoolean:
2485    case Primitive::kPrimByte: {
2486      Register value = locations->InAt(1).As<Register>();
2487      __ StoreToOffset(kStoreByte, value, cls, offset);
2488      break;
2489    }
2490
2491    case Primitive::kPrimShort:
2492    case Primitive::kPrimChar: {
2493      Register value = locations->InAt(1).As<Register>();
2494      __ StoreToOffset(kStoreHalfword, value, cls, offset);
2495      break;
2496    }
2497
2498    case Primitive::kPrimInt:
2499    case Primitive::kPrimNot: {
2500      Register value = locations->InAt(1).As<Register>();
2501      __ StoreToOffset(kStoreWord, value, cls, offset);
2502      if (field_type == Primitive::kPrimNot) {
2503        Register temp = locations->GetTemp(0).As<Register>();
2504        Register card = locations->GetTemp(1).As<Register>();
2505        codegen_->MarkGCCard(temp, card, cls, value);
2506      }
2507      break;
2508    }
2509
2510    case Primitive::kPrimLong: {
2511      Location value = locations->InAt(1);
2512      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
2513      break;
2514    }
2515
2516    case Primitive::kPrimFloat: {
2517      SRegister value = locations->InAt(1).As<SRegister>();
2518      __ StoreSToOffset(value, cls, offset);
2519      break;
2520    }
2521
2522    case Primitive::kPrimDouble: {
2523      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
2524      __ StoreDToOffset(value, cls, offset);
2525      break;
2526    }
2527
2528    case Primitive::kPrimVoid:
2529      LOG(FATAL) << "Unreachable type " << field_type;
2530      UNREACHABLE();
2531  }
2532}
2533
2534void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
2535  LocationSummary* locations =
2536      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2537  locations->SetOut(Location::RequiresRegister());
2538}
2539
2540void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
2541  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
2542  codegen_->AddSlowPath(slow_path);
2543
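      // Walk ArtMethod* -> dex cache strings -> the String. A null entry means the string
      // has not been resolved yet and is loaded through the runtime slow path.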
2544  Register out = load->GetLocations()->Out().As<Register>();
2545  codegen_->LoadCurrentMethod(out);
2546  __ LoadFromOffset(
2547      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
2548  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
2549  __ cmp(out, ShifterOperand(0));
2550  __ b(slow_path->GetEntryLabel(), EQ);
2551  __ Bind(slow_path->GetExitLabel());
2552}
2553
2554void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
2555  LocationSummary* locations =
2556      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2557  locations->SetOut(Location::RequiresRegister());
2558}
2559
2560void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
2561  Register out = load->GetLocations()->Out().As<Register>();
2562  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
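      // Load the pending exception from the current Thread, then clear the field so the
      // exception cannot be delivered twice.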
2563  __ LoadFromOffset(kLoadWord, out, TR, offset);
2564  __ LoadImmediate(IP, 0);
2565  __ StoreToOffset(kStoreWord, IP, TR, offset);
2566}
2567
2568void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
2569  LocationSummary* locations =
2570      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2571  InvokeRuntimeCallingConvention calling_convention;
2572  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2573}
2574
2575void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
2576  codegen_->InvokeRuntime(
2577      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
2578}
2579
2580}  // namespace arm
2581}  // namespace art
2582