code_generator_arm64.cc revision dff1f2812ecdaea89978c5351f0c70cdabbc0821
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;   // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

static bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

// TODO: clean-up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {
// Convenience helpers to ease conversion to and from VIXL operands.
int VIXLRegCodeFromART(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
  return HeapOperand(RegisterFrom(location, type), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

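// Calling convention used for calls into the runtime: core arguments are
// passed in x0-x7, as in AAPCS64. Floating-point arguments are not supported
// yet, hence the empty FP register list above.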
class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }
  return LocationFrom(x0);
}

#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
                              index_location_, Primitive::kPrimInt);
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
                              length_location_, Primitive::kPrimInt);
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
    __ Bind(GetEntryLabel());
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    __ B(GetReturnLabel());
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

#undef __

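// Computes the location of the next Dex-level argument. Note that stack_index_
// is advanced for every argument, whether it is passed in a register or on
// the stack, so the stack area mirrors the register arguments; long values
// always account for two stack slots.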
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented type " << type;
  }

  if (gp_index_ < calling_convention.GetNumberOfRegisters()) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_));
    if (type == Primitive::kPrimLong) {
      // Double stack slot reserved on the stack.
      stack_index_++;
    }
  } else {  // Stack.
    if (type == Primitive::kPrimLong) {
      next_location = Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_));
      // Double stack slot reserved on the stack.
      stack_index_++;
    } else {
      next_location = Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_));
    }
  }
  // Move to the next register/stack slot.
  gp_index_++;
  stack_index_++;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFloatingPointRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->

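// The frame entry emits, as a sketch (register names illustrative):
//   add  temp, sp, #-reserved_bytes  ; probe an address below the frame so
//   ldr  temp, [temp]                ; a stack overflow faults early
//   str  w0, [sp, #-frame_size]!     ; allocate the frame, spill ArtMethod*
// followed by stores of the callee-saved registers at the top of the frame.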
void CodeGeneratorARM64::GenerateFrameEntry() {
  // TODO: Add proper support for the stack overflow check.
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register temp = temps.AcquireX();
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  // sp[frame_size - 8]        : lr.
  // ...                       : other preserved registers.
  // sp[frame_size - regs_size]: first preserved register.
  // ...                       : reserved frame space.
  // sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

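// Moves a value between two arbitrary locations. The primitive type selects
// the register width (W for 32-bit values, X for longs) and, for
// stack-to-stack moves, the width of the scratch register used.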
void CodeGeneratorARM64::MoveHelper(Location destination,
                                    Location source,
                                    Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    Register dst = RegisterFrom(destination, type);
    if (source.IsRegister()) {
      Register src = RegisterFrom(source, type);
      DCHECK(dst.IsSameSizeAndType(src));
      __ Mov(dst, src);
    } else {
      DCHECK(dst.Is64Bits() || !source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    }
  } else {
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
    } else {
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }

  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (type) {
      case Primitive::kPrimNot:
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        MoveHelper(location, Location::StackSlot(stack_slot), type);
        break;
      case Primitive::kPrimLong:
        MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
        break;
      default:
        LOG(FATAL) << "Unimplemented type " << type;
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveHelper(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();
  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << type;
      break;
    case Primitive::kPrimVoid:
    default:
      LOG(FATAL) << "Unexpected type " << type;
  }
  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

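// Write barrier: after a reference store, mark the card covering `object` as
// dirty so the GC revisits it. The card address is computed as
// card_table_base + (object >> kCardShift); the Strb below stores the low
// byte of `card` (the table base) as the dirty value, ART's usual trick to
// avoid materializing a separate constant.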
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register card = temps.AcquireX();
  Register temp = temps.AcquireX();
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   xSuspend (Suspend counter)
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  // TODO: See if we should instead allow allocating but preserve those if used.
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented support for floating-point";
  }

  ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfXRegisters);
  DCHECK_NE(reg, -1);
  blocked_core_registers_[reg] = true;

  if (IsFPType(type)) {
    return Location::FpuRegisterLocation(reg);
  } else {
    return Location::RegisterLocation(reg);
  }
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

#undef __
#define __ assembler_->vixl_masm_->

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

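// The instructions below are not implemented yet. Each generated visitor
// emits a Brk whose immediate is unique to the instruction kind (the enum
// below), so a crash pinpoints which unimplemented instruction was reached.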
#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  M(ArrayGet)                                              \
  M(ArraySet)                                              \
  M(ClinitCheck)                                           \
  M(DoubleConstant)                                        \
  M(Div)                                                   \
  M(FloatConstant)                                         \
  M(LoadClass)                                             \
  M(LoadString)                                            \
  M(Neg)                                                   \
  M(NewArray)                                              \
  M(ParallelMove)                                          \
  M(StaticFieldGet)                                        \
  M(StaticFieldSet)                                        \
  M(TypeConversion)

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE

void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());

  Primitive::Type type = instr->GetType();
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Operand rhs = InputOperandAt(instr, 1);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else {
        __ Sub(dst, lhs, rhs);
      }
      break;

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add/sub type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented add/sub type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister());
}

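// For a long compare this emits, as a sketch:
//   subs result, left, right  // Sets the flags; result = left - right.
//   b.eq done                 // If equal, result is already 0.
//   mov  result, #1
//   cneg result, result, le   // result = (left < right) ? -1 : 1.
// done: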
void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  DCHECK_EQ(in_type, Primitive::kPrimLong);
  switch (in_type) {
    case Primitive::kPrimLong: {
      vixl::Label done;
      Register result = OutputRegister(instruction);
      Register left = InputRegisterAt(instruction, 0);
      Operand right = InputOperandAt(instruction, 1);
      __ Subs(result, left, right);
      __ B(eq, &done);
      __ Mov(result, 1);
      __ Cneg(result, result, le);
      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}

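// Materializes a condition into a register: compare the inputs, then select
// between the zero register and 1 under the inverted condition, i.e.
// res = cond ? 1 : 0.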
void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

  __ Cmp(lhs, rhs);
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}

#define FOR_EACH_CONDITION_INSTRUCTION(M)                                                \
  M(Equal)                                                                               \
  M(NotEqual)                                                                            \
  M(LessThan)                                                                            \
  M(LessThanOrEqual)                                                                     \
  M(GreaterThan)                                                                         \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                  \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
    __ Brk(0);    // TODO: Introduce special markers for such code locations.
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  // TODO: Support for suspend checks emission.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

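// Two cases: when the condition was materialized, a single Cbnz on its output
// jumps to the true target. Otherwise the compare is folded into the branch,
// with Cbz/Cbnz as a shortcut for (in)equality tests against zero. The branch
// to the false successor is elided when it is the next block in order.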
void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition arm64_cond = ARM64Condition(condition->GetCondition());
    if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (arm64_cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(arm64_cond, true_target);
    }
  }

  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  Primitive::Type res_type = instruction->GetType();
  Register res = OutputRegister(instruction);
  Register obj = InputRegisterAt(instruction, 0);
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (res_type) {
    case Primitive::kPrimBoolean: {
      __ Ldrb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimByte: {
      __ Ldrsb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimShort: {
      __ Ldrsh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimChar: {
      __ Ldrh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {  // TODO: support volatile.
      DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
      __ Ldr(res, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register res_type " << res_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable res_type " << res_type;
  }
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Register obj = InputRegisterAt(instruction, 0);
  Register value = InputRegisterAt(instruction, 1);
  Primitive::Type field_type = instruction->InputAt(1)->GetType();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ Strb(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ Strh(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {
      DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
      __ Str(value, MemOperand(obj, offset));

      if (field_type == Primitive::kPrimNot) {
        codegen_->MarkGCCard(obj, value);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

875
876void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
877  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
878  locations->SetOut(Location::ConstantLocation(constant));
879}
880
881void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
882  // Will be generated at use site.
883  UNUSED(constant);
884}
885
886void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
887  HandleInvoke(invoke);
888}
889
890void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
891  HandleInvoke(invoke);
892}
893
894void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
895  LocationSummary* locations =
896      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
897  locations->AddTemp(LocationFrom(x0));
898
899  InvokeDexCallingConventionVisitor calling_convention_visitor;
900  for (size_t i = 0; i < invoke->InputCount(); i++) {
901    HInstruction* input = invoke->InputAt(i);
902    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
903  }
904
905  Primitive::Type return_type = invoke->GetType();
906  if (return_type != Primitive::kPrimVoid) {
907    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
908  }
909}
910
void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0 as per the calling convention.
  DCHECK(temp.Is(w0));
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp,
         MemOperand(temp.X(), mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr,
         MemOperand(temp.X(),
                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
  __ Mov(type_index, instruction->GetTypeIndex());
  int32_t entry_point_offset =
      QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocObjectWithAccessCheck).Int32Value();
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    case Primitive::kPrimLong:
      LOG(FATAL) << "Not yet implemented type for not operation " << instruction->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

1104
1105void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
1106  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1107  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1108  if (location.IsStackSlot()) {
1109    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1110  } else if (location.IsDoubleStackSlot()) {
1111    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1112  }
1113  locations->SetOut(location);
1114}
1115
1116void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
1117  // Nothing to do, the parameter is already at its location.
1118  UNUSED(instruction);
1119}
1120
1121void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
1122  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1123  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1124    locations->SetInAt(i, Location::Any());
1125  }
1126  locations->SetOut(Location::Any());
1127}
1128
1129void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
1130  UNUSED(instruction);
1131  LOG(FATAL) << "Unreachable";
1132}
1133
1134void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
1135  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1136  Primitive::Type return_type = instruction->InputAt(0)->GetType();
1137
1138  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
1139    LOG(FATAL) << "Unimplemented return type " << return_type;
1140  }
1141
1142  locations->SetInAt(0, LocationFrom(x0));
1143}
1144
1145void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
1146  if (kIsDebugBuild) {
1147    Primitive::Type type = instruction->InputAt(0)->GetType();
1148    switch (type) {
1149      case Primitive::kPrimBoolean:
1150      case Primitive::kPrimByte:
1151      case Primitive::kPrimChar:
1152      case Primitive::kPrimShort:
1153      case Primitive::kPrimInt:
1154      case Primitive::kPrimNot:
1155        DCHECK(InputRegisterAt(instruction, 0).Is(w0));
1156        break;
1157
1158      case Primitive::kPrimLong:
1159        DCHECK(InputRegisterAt(instruction, 0).Is(x0));
1160        break;
1161
1162      default:
1163        LOG(FATAL) << "Unimplemented return type " << type;
1164    }
1165  }
1166  codegen_->GenerateFrameExit();
1167  __ Br(lr);
1168}
1169
1170void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
1171  instruction->SetLocations(nullptr);
1172}
1173
1174void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
1175  UNUSED(instruction);
1176  codegen_->GenerateFrameExit();
1177  __ Br(lr);
1178}
1179
1180void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
1181  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
1182  Primitive::Type field_type = store->InputAt(1)->GetType();
1183  switch (field_type) {
1184    case Primitive::kPrimBoolean:
1185    case Primitive::kPrimByte:
1186    case Primitive::kPrimChar:
1187    case Primitive::kPrimShort:
1188    case Primitive::kPrimInt:
1189    case Primitive::kPrimNot:
1190      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1191      break;
1192
1193    case Primitive::kPrimLong:
1194      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1195      break;
1196
1197    default:
1198      LOG(FATAL) << "Unimplemented local type " << field_type;
1199  }
1200}
1201
1202void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
1203  UNUSED(store);
1204}
1205
1206void LocationsBuilderARM64::VisitSub(HSub* instruction) {
1207  HandleAddSub(instruction);
1208}
1209
1210void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
1211  HandleAddSub(instruction);
1212}
1213
1214void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1215  LocationSummary* locations =
1216      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1217  locations->SetInAt(0, Location::RequiresRegister());
1218  locations->SetInAt(1, Location::RequiresRegister());
1219  if (instruction->HasUses()) {
1220    locations->SetOut(Location::SameAsFirstInput());
1221  }
1222}
1223
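// A single unsigned comparison covers both bounds checks: a negative index
// compares as a large unsigned value, so branching to the slow path on `hs`
// (unsigned >=) also catches index < 0.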
void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

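// Suspend checks decrement the dedicated wSuspend counter register; when it
// drops to zero or below, the slow path calls pTestSuspend so the runtime
// can suspend the thread, and execution resumes at the return label.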
void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // TODO: Improve support for suspend checks.
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
  codegen_->AddSlowPath(slow_path);

  __ Subs(wSuspend, wSuspend, 1);
  __ B(slow_path->GetEntryLabel(), le);
  __ Bind(slow_path->GetReturnLabel());
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

}  // namespace arm64
}  // namespace art