code_generator_arm64.cc revision 5b4b898ed8725242ee6b7229b94467c3ea3054c8
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;   // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

static constexpr bool kExplicitStackOverflowCheck = false;
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {

bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

bool IsIntegralType(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      return true;
    default:
      return false;
  }
}

bool Is64BitType(Primitive::Type type) {
  return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
}

// Convenience helpers to ease conversion to and from VIXL operands.
static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
              "Unexpected values for register codes.");

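// ART numbers SP/WSP as 31 and XZR/WZR as 32, while VIXL uses a dedicated
// internal code for the stack pointer and the architectural encoding for the
// zero register. The two helpers below translate register codes between the
// two conventions.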
int VIXLRegCodeFromART(int code) {
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}

int ARTRegCodeFromVIXL(int code) {
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  DCHECK(location.IsRegister());
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

FPRegister DRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::DRegFromCode(location.reg());
}

FPRegister SRegisterFrom(Location location) {
  DCHECK(location.IsFpuRegister());
  return FPRegister::SRegFromCode(location.reg());
}

FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
  DCHECK(IsFPType(type));
  return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
}

FPRegister OutputFPRegister(HInstruction* instr) {
  return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
  return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
                        instr->InputAt(input_index)->GetType());
}

CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
  return IsFPType(type) ? CPURegister(FPRegisterFrom(location, type))
                        : CPURegister(RegisterFrom(location, type));
}

CPURegister OutputCPURegister(HInstruction* instr) {
  return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr))
                                    : static_cast<CPURegister>(OutputRegister(instr));
}

CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
  return IsFPType(instr->InputAt(index)->GetType())
      ? static_cast<CPURegister>(InputFPRegisterAt(instr, index))
      : static_cast<CPURegister>(InputRegisterAt(instr, index));
}

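// Returns the value of a constant Location, accepting either an HIntConstant
// or an HLongConstant and widening int values to 64 bits.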
int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, size_t offset = 0) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset);
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  return HeapOperand(base, offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Offset offset) {
  return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

Location LocationFrom(const FPRegister& fpreg) {
  return Location::FpuRegisterLocation(fpreg.code());
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}


  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0).W());
    __ Mov(calling_convention.GetRegisterAt(1).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  StackOverflowCheckSlowPathARM64() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
  } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                      : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->SwapLocations(move->GetDestination(), move->GetSource());
}

void ParallelMoveResolverARM64::RestoreScratch(int reg) {
  __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void ParallelMoveResolverARM64::SpillScratch(int reg) {
  __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireX();
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
      AddSlowPath(slow_path);

      __ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
      __ Cmp(sp, temp);
      __ B(lo, slow_path->GetEntryLabel());
    } else {
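      // Implicit stack-overflow check: probe the address
      // `sp - GetStackOverflowReservedBytes(kArm64)`. If the stack has
      // overflowed, the load faults and the runtime is expected to convert the
      // fault at the recorded pc into a StackOverflowError.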
      __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
      __ Ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  // sp[frame_size - 8]        : lr.
  // ...                       : other preserved registers.
  // sp[frame_size - regs_size]: first preserved register.
  // ...                       : reserved frame space.
  // sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

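// Marks the GC card covering `object` when a non-null reference `value` has
// been stored into it: the card index is `object >> kCardShift`, and the byte
// stored is the low byte of the card-table base address held in `card`.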
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();   // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   tr
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  // TODO: Avoid blocking callee-saved registers, and instead preserve them
  // where necessary.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (IsFPType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant() || constant->IsLongConstant()) {
    __ Mov(Register(destination),
           constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
                                     : constant->AsLongConstant()->GetValue());
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
           (destination.IsRegister() && !IsFPType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }

  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
             (source.IsFpuRegister() == IsFPType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    Register r1 = XRegisterFrom(loc1);
    Register r2 = XRegisterFrom(loc2);
    Register tmp = temps.AcquireSameSizeAs(r1);
    __ Mov(tmp, r2);
    __ Mov(r2, r1);
    __ Mov(r1, tmp);
  } else if (is_fp_reg2 && is_fp_reg1) {
    FPRegister r1 = DRegisterFrom(loc1);
    FPRegister r2 = DRegisterFrom(loc2);
    FPRegister tmp = temps.AcquireSameSizeAs(r1);
    __ Fmov(tmp, r2);
    __ Fmov(r2, r1);
    __ Fmov(r1, tmp);
  } else if (is_slot1 != is_slot2) {
    MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
    Location reg_loc = is_slot1 ? loc2 : loc1;
    CPURegister reg, tmp;
    if (reg_loc.IsFpuRegister()) {
      reg = DRegisterFrom(reg_loc);
      tmp = temps.AcquireD();
    } else {
      reg = XRegisterFrom(reg_loc);
      tmp = temps.AcquireX();
    }
    __ Ldr(tmp, mem);
    __ Str(reg, mem);
    if (reg_loc.IsFpuRegister()) {
      __ Fmov(FPRegister(reg), FPRegister(tmp));
    } else {
      __ Mov(Register(reg), Register(tmp));
    }
  } else if (is_slot1 && is_slot2) {
    MemOperand mem1 = StackOperandFrom(loc1);
    MemOperand mem2 = StackOperandFrom(loc2);
    Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
    Register tmp2 = temps.AcquireSameSizeAs(tmp1);
    __ Ldr(tmp1, mem1);
    __ Ldr(tmp2, mem2);
    __ Str(tmp1, mem2);
    __ Str(tmp2, mem1);
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              vixl::CPURegister dst,
                              const vixl::MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK(dst.Is64Bits() == Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               vixl::CPURegister rt,
                               const vixl::MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(rt), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(rt), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK(rt.Is64Bits() == Is64BitType(type));
      __ Str(rt, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

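// Loads a quick entrypoint from the current Thread (tr) at
// `entry_point_offset` and calls it with blr. When `instruction` is non-null,
// the call site is recorded against `dex_pc` via RecordPcInfo.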
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  if (instruction != nullptr) {
    RecordPcInfo(instruction, dex_pc);
    DCHECK(instruction->IsSuspendCheck()
        || instruction->IsBoundsCheck()
        || instruction->IsNullCheck()
        || instruction->IsDivZeroCheck()
        || !IsLeafMethod());
  }
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  __ Ldr(temp, HeapOperand(class_reg, mirror::Class::StatusOffset()));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Dmb(InnerShareable, BarrierReads);
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
    new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

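  // The 16-bit thread flags are non-zero when a suspend or checkpoint request
  // is pending; in that case execution is diverted to the slow path, which
  // calls pTestSuspend.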
  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)              \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
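        // Constant shift amounts are masked to the operand width (0-31 for
        // int, 0-63 for long), matching Java shift semantics.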
        uint32_t shift_value = (type == Primitive::kPrimInt)
          ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
          : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
  MemOperand source = HeapOperand(obj);
  UseScratchRegisterScope temps(GetVIXLAssembler());

  if (index.IsConstant()) {
    offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
    source = HeapOperand(obj, offset);
  } else {
    Register temp = temps.AcquireSameSizeAs(obj);
    Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
    __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
    source = HeapOperand(temp, offset);
  }

  codegen_->Load(type, OutputCPURegister(instruction), source);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  bool is_object = value_type == Primitive::kPrimNot;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (is_object) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  if (value_type == Primitive::kPrimNot) {
    codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());

  } else {
    LocationSummary* locations = instruction->GetLocations();
    Register obj = InputRegisterAt(instruction, 0);
    CPURegister value = InputCPURegisterAt(instruction, 2);
    Location index = locations->InAt(1);
    size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
    MemOperand destination = HeapOperand(obj);
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      Register index_reg = InputRegisterAt(instruction, 1);
      __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
      destination = HeapOperand(temp, offset);
    }

    codegen_->Store(value_type, value, destination);
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = InputRegisterAt(instruction, 1);
  Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));

  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
      instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ Cbz(obj, slow_path->GetExitLabel());
  // Compare the class of `obj` with `cls`.
  __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
  __ Cmp(obj_cls, cls);
  __ B(ne, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);

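      // Compute the -1/0/+1 result branch-free: `result` is set to 1 when the
      // operands differ, then negated when `left < right`.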
      __ Cmp(left, right);
      __ Cset(result, ne);
      __ Cneg(result, result, lt);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      FPRegister left = InputFPRegisterAt(compare, 0);
      FPRegister right = InputFPRegisterAt(compare, 1);

      __ Fcmp(left, right);
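      // The bias only matters for unordered (NaN) inputs: gt-bias compares
      // yield +1 on NaN, lt-bias compares yield -1.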
      if (compare->IsGtBias()) {
        __ Cset(result, ne);
      } else {
        __ Csetm(result, ne);
      }
      __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

  __ Cmp(lhs, rhs);
  __ Cset(res, cond);
}

#define FOR_EACH_CONDITION_INSTRUCTION(M)                                                \
  M(Equal)                                                                               \
  M(NotEqual)                                                                            \
  M(LessThan)                                                                            \
  M(LessThanOrEqual)                                                                     \
  M(GreaterThan)                                                                         \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                  \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1506      break;
1507
1508    case Primitive::kPrimFloat:
1509    case Primitive::kPrimDouble:
1510      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1511      break;
1512
1513    default:
1514      LOG(FATAL) << "Unexpected div type " << type;
1515  }
1516}
1517
1518void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1519  LocationSummary* locations =
1520      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1521  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1522  if (instruction->HasUses()) {
1523    locations->SetOut(Location::SameAsFirstInput());
1524  }
1525}
1526
1527void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1528  SlowPathCodeARM64* slow_path =
1529      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1530  codegen_->AddSlowPath(slow_path);
1531  Location value = instruction->GetLocations()->InAt(0);
1532
1533  Primitive::Type type = instruction->GetType();
1534
1535  if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
1536    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1537    return;
1538  }
1539
1540  if (value.IsConstant()) {
1541    int64_t divisor = Int64ConstantFrom(value);
1542    if (divisor == 0) {
1543      __ B(slow_path->GetEntryLabel());
1544    } else {
1545      // A division by a non-zero constant is valid. We don't need to perform
1546      // any check, so simply fall through.
1547    }
1548  } else {
1549    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1550  }
1551}
1552
1553void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1554  LocationSummary* locations =
1555      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1556  locations->SetOut(Location::ConstantLocation(constant));
1557}
1558
1559void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1560  UNUSED(constant);
1561  // Will be generated at use site.
1562}
1563
1564void LocationsBuilderARM64::VisitExit(HExit* exit) {
1565  exit->SetLocations(nullptr);
1566}
1567
1568void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
1569  UNUSED(exit);
1570  if (kIsDebugBuild) {
1571    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
1572    __ Brk(__LINE__);    // TODO: Introduce special markers for such code locations.
1573  }
1574}
1575
1576void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1577  LocationSummary* locations =
1578      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1579  locations->SetOut(Location::ConstantLocation(constant));
1580}
1581
1582void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1583  UNUSED(constant);
1584  // Will be generated at use site.
1585}
1586
1587void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1588  got->SetLocations(nullptr);
1589}
1590
1591void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1592  HBasicBlock* successor = got->GetSuccessor();
1593  DCHECK(!successor->IsExitBlock());
1594  HBasicBlock* block = got->GetBlock();
1595  HInstruction* previous = got->GetPrevious();
1596  HLoopInformation* info = block->GetLoopInformation();
1597
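  // A goto on a loop back edge owns the loop's suspend check: emit the check,
  // which also takes care of the branch to the successor, instead of a plain
  // unconditional jump.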
1598  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1599    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1600    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1601    return;
1602  }
1603  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1604    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1605  }
1606  if (!codegen_->GoesToNextBlock(block, successor)) {
1607    __ B(codegen_->GetLabelOf(successor));
1608  }
1609}
1610
1611void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1612  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1613  HInstruction* cond = if_instr->InputAt(0);
1614  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
1615    locations->SetInAt(0, Location::RequiresRegister());
1616  }
1617}
1618
1619void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1620  HInstruction* cond = if_instr->InputAt(0);
1621  HCondition* condition = cond->AsCondition();
1622  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1623  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1624
1625  if (cond->IsIntConstant()) {
1626    int32_t cond_value = cond->AsIntConstant()->GetValue();
1627    if (cond_value == 1) {
1628      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1629        __ B(true_target);
1630      }
1631      return;
1632    } else {
1633      DCHECK_EQ(cond_value, 0);
1634    }
1635  } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
1636    // The condition instruction has been materialized, compare the output to 0.
1637    Location cond_val = if_instr->GetLocations()->InAt(0);
1638    DCHECK(cond_val.IsRegister());
1639    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
1640  } else {
1641    // The condition instruction has not been materialized, use its inputs as
1642    // the comparison and its condition as the branch condition.
1643    Register lhs = InputRegisterAt(condition, 0);
1644    Operand rhs = InputOperandAt(condition, 1);
1645    Condition arm64_cond = ARM64Condition(condition->GetCondition());
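    // eq/ne comparisons against an immediate zero can be emitted as a single
    // compare-and-branch (Cbz/Cbnz) instead of Cmp followed by B.cond.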
1646    if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1647      if (arm64_cond == eq) {
1648        __ Cbz(lhs, true_target);
1649      } else {
1650        __ Cbnz(lhs, true_target);
1651      }
1652    } else {
1653      __ Cmp(lhs, rhs);
1654      __ B(arm64_cond, true_target);
1655    }
1656  }
1657  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1658    __ B(false_target);
1659  }
1660}
1661
1662void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1663  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1664  locations->SetInAt(0, Location::RequiresRegister());
1665  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1666}
1667
1668void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1669  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
1670  codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1671}
1672
1673void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1674  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1675  locations->SetInAt(0, Location::RequiresRegister());
1676  locations->SetInAt(1, Location::RequiresRegister());
1677}
1678
1679void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1680  Primitive::Type field_type = instruction->GetFieldType();
1681  CPURegister value = InputCPURegisterAt(instruction, 1);
1682  Register obj = InputRegisterAt(instruction, 0);
1683  codegen_->Store(field_type, value, HeapOperand(obj, instruction->GetFieldOffset()));
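  // Storing a reference needs a write barrier: mark the card covering the
  // holder object so the garbage collector notices the new reference.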
1684  if (field_type == Primitive::kPrimNot) {
1685    codegen_->MarkGCCard(obj, Register(value));
1686  }
1687}
1688
1689void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1690  LocationSummary::CallKind call_kind =
1691      instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1692  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1693  locations->SetInAt(0, Location::RequiresRegister());
1694  locations->SetInAt(1, Location::RequiresRegister());
1695  locations->SetOut(Location::RequiresRegister(), true);  // The output does overlap inputs.
1696}
1697
1698void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1699  LocationSummary* locations = instruction->GetLocations();
1700  Register obj = InputRegisterAt(instruction, 0);
1701  Register cls = InputRegisterAt(instruction, 1);
1702  Register out = OutputRegister(instruction);
1703
1704  vixl::Label done;
1705
1706  // Return 0 if `obj` is null.
1707  // TODO: Avoid this check if we know `obj` is not null.
1708  __ Mov(out, 0);
1709  __ Cbz(obj, &done);
1710
1711  // Compare the class of `obj` with `cls`.
1712  __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
1713  __ Cmp(out, cls);
1714  if (instruction->IsClassFinal()) {
1715    // Classes must be equal for the instanceof to succeed.
1716    __ Cset(out, eq);
1717  } else {
1718    // If the classes are not equal, we go into a slow path.
1719    DCHECK(locations->OnlyCallsOnSlowPath());
1720    SlowPathCodeARM64* slow_path =
1721        new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1722        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
1723    codegen_->AddSlowPath(slow_path);
1724    __ B(ne, slow_path->GetEntryLabel());
1725    __ Mov(out, 1);
1726    __ Bind(slow_path->GetExitLabel());
1727  }
1728
1729  __ Bind(&done);
1730}
1731
1732void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1733  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1734  locations->SetOut(Location::ConstantLocation(constant));
1735}
1736
1737void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1738  // Will be generated at use site.
1739  UNUSED(constant);
1740}
1741
1742void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1743  LocationSummary* locations =
1744      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1745  locations->AddTemp(LocationFrom(x0));
1746
1747  InvokeDexCallingConventionVisitor calling_convention_visitor;
1748  for (size_t i = 0; i < invoke->InputCount(); i++) {
1749    HInstruction* input = invoke->InputAt(i);
1750    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1751  }
1752
1753  Primitive::Type return_type = invoke->GetType();
1754  if (return_type != Primitive::kPrimVoid) {
1755    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1756  }
1757}
1758
1759void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1760  HandleInvoke(invoke);
1761}
1762
1763void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1764  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1765  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1766  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1767          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1768  Location receiver = invoke->GetLocations()->InAt(0);
1769  Offset class_offset = mirror::Object::ClassOffset();
1770  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
1771
1772  // The register ip1 is required to be used for the hidden argument in
1773  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1774  UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1775  scratch_scope.Exclude(ip1);
1776  __ Mov(ip1, invoke->GetDexMethodIndex());
1777
1778  // temp = object->GetClass();
1779  if (receiver.IsStackSlot()) {
1780    __ Ldr(temp, StackOperandFrom(receiver));
1781    __ Ldr(temp, HeapOperand(temp, class_offset));
1782  } else {
1783    __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1784  }
1785  // temp = temp->GetImtEntryAt(method_offset);
1786  __ Ldr(temp, HeapOperand(temp, method_offset));
1787  // lr = temp->GetEntryPoint();
1788  __ Ldr(lr, HeapOperand(temp, entry_point));
1789  // lr();
1790  __ Blr(lr);
1791  DCHECK(!codegen_->IsLeafMethod());
1792  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1793}
1794
1795void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1796  HandleInvoke(invoke);
1797}
1798
1799void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
1800  HandleInvoke(invoke);
1801}
1802
1803void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
1804  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1805  // Make sure that ArtMethod* is passed in W0 as per the calling convention.
1806  DCHECK(temp.Is(w0));
1807  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
1808    invoke->GetIndexInDexCache() * kHeapRefSize;
1809
1810  // TODO: Implement all kinds of calls:
1811  // 1) boot -> boot
1812  // 2) app -> boot
1813  // 3) app -> app
1814  //
1815  // Currently we implement the app -> app logic, which looks up in the resolve cache.
1816
1817  // temp = method;
1818  codegen_->LoadCurrentMethod(temp);
1819  // temp = temp->dex_cache_resolved_methods_;
1820  __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1821  // temp = temp[index_in_cache];
1822  __ Ldr(temp, HeapOperand(temp, index_in_cache));
1823  // lr = temp->entry_point_from_quick_compiled_code_;
1824  __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1825                          kArm64WordSize)));
1826  // lr();
1827  __ Blr(lr);
1828
1829  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1830  DCHECK(!codegen_->IsLeafMethod());
1831}
1832
1833void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1834  LocationSummary* locations = invoke->GetLocations();
1835  Location receiver = locations->InAt(0);
1836  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1837  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
1838    invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1839  Offset class_offset = mirror::Object::ClassOffset();
1840  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
1841
1842  // temp = object->GetClass();
1843  if (receiver.IsStackSlot()) {
1844    __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
1845    __ Ldr(temp, HeapOperand(temp, class_offset));
1846  } else {
1847    DCHECK(receiver.IsRegister());
1848    __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1849  }
1850  // temp = temp->GetMethodAt(method_offset);
1851  __ Ldr(temp, HeapOperand(temp, method_offset));
1852  // lr = temp->GetEntryPoint();
1853  __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
1854  // lr();
1855  __ Blr(lr);
1856  DCHECK(!codegen_->IsLeafMethod());
1857  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1858}
1859
1860void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
1861  LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
1862                                                              : LocationSummary::kNoCall;
1863  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
1864  locations->SetOut(Location::RequiresRegister());
1865}
1866
1867void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
1868  Register out = OutputRegister(cls);
1869  if (cls->IsReferrersClass()) {
1870    DCHECK(!cls->CanCallRuntime());
1871    DCHECK(!cls->MustGenerateClinitCheck());
1872    codegen_->LoadCurrentMethod(out);
1873    __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1874  } else {
1875    DCHECK(cls->CanCallRuntime());
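    // Load the type through the current method's dex cache. A null entry
    // means the class is not resolved yet; the slow path below resolves it
    // and, if required, runs the class initializer.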
1876    codegen_->LoadCurrentMethod(out);
1877    __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
1878    __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
1879
1880    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1881        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
1882    codegen_->AddSlowPath(slow_path);
1883    __ Cbz(out, slow_path->GetEntryLabel());
1884    if (cls->MustGenerateClinitCheck()) {
1885      GenerateClassInitializationCheck(slow_path, out);
1886    } else {
1887      __ Bind(slow_path->GetExitLabel());
1888    }
1889  }
1890}
1891
1892void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
1893  LocationSummary* locations =
1894      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
1895  locations->SetOut(Location::RequiresRegister());
1896}
1897
1898void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
1899  MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
1900  __ Ldr(OutputRegister(instruction), exception);
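  // Clear the pending exception now that it has been read.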
1901  __ Str(wzr, exception);
1902}
1903
1904void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
1905  load->SetLocations(nullptr);
1906}
1907
1908void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
1909  // Nothing to do, this is driven by the code generator.
1910  UNUSED(load);
1911}
1912
1913void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
1914  LocationSummary* locations =
1915      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
1916  locations->SetOut(Location::RequiresRegister());
1917}
1918
1919void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
1920  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
1921  codegen_->AddSlowPath(slow_path);
1922
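  // Load the string through the declaring class' dex cache. A null entry
  // means the string is not resolved yet and is handled by the slow path.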
1923  Register out = OutputRegister(load);
1924  codegen_->LoadCurrentMethod(out);
1925  __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
1926  __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
1927  __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
1928  __ Cbz(out, slow_path->GetEntryLabel());
1929  __ Bind(slow_path->GetExitLabel());
1930}
1931
1932void LocationsBuilderARM64::VisitLocal(HLocal* local) {
1933  local->SetLocations(nullptr);
1934}
1935
1936void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
1937  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1938}
1939
1940void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
1941  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1942  locations->SetOut(Location::ConstantLocation(constant));
1943}
1944
1945void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
1946  // Will be generated at use site.
1947  UNUSED(constant);
1948}
1949
1950void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
1951  LocationSummary* locations =
1952      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1953  InvokeRuntimeCallingConvention calling_convention;
1954  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1955}
1956
1957void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
1958  codegen_->InvokeRuntime(instruction->IsEnter()
1959        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
1960      instruction,
1961      instruction->GetDexPc());
1962}
1963
1964void LocationsBuilderARM64::VisitMul(HMul* mul) {
1965  LocationSummary* locations =
1966      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1967  switch (mul->GetResultType()) {
1968    case Primitive::kPrimInt:
1969    case Primitive::kPrimLong:
1970      locations->SetInAt(0, Location::RequiresRegister());
1971      locations->SetInAt(1, Location::RequiresRegister());
1972      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1973      break;
1974
1975    case Primitive::kPrimFloat:
1976    case Primitive::kPrimDouble:
1977      locations->SetInAt(0, Location::RequiresFpuRegister());
1978      locations->SetInAt(1, Location::RequiresFpuRegister());
1979      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1980      break;
1981
1982    default:
1983      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1984  }
1985}
1986
1987void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
1988  switch (mul->GetResultType()) {
1989    case Primitive::kPrimInt:
1990    case Primitive::kPrimLong:
1991      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
1992      break;
1993
1994    case Primitive::kPrimFloat:
1995    case Primitive::kPrimDouble:
1996      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
1997      break;
1998
1999    default:
2000      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2001  }
2002}
2003
2004void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2005  LocationSummary* locations =
2006      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2007  switch (neg->GetResultType()) {
2008    case Primitive::kPrimInt:
2009    case Primitive::kPrimLong:
2010      locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
2011      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2012      break;
2013
2014    case Primitive::kPrimFloat:
2015    case Primitive::kPrimDouble:
2016      locations->SetInAt(0, Location::RequiresFpuRegister());
2017      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2018      break;
2019
2020    default:
2021      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2022  }
2023}
2024
2025void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2026  switch (neg->GetResultType()) {
2027    case Primitive::kPrimInt:
2028    case Primitive::kPrimLong:
2029      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2030      break;
2031
2032    case Primitive::kPrimFloat:
2033    case Primitive::kPrimDouble:
2034      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
2035      break;
2036
2037    default:
2038      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2039  }
2040}
2041
2042void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2043  LocationSummary* locations =
2044      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2045  InvokeRuntimeCallingConvention calling_convention;
2046  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2047  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2048  locations->SetOut(LocationFrom(x0));
2049  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(2)));
2050}
2051
2052void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2053  LocationSummary* locations = instruction->GetLocations();
2054  InvokeRuntimeCallingConvention calling_convention;
2055  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2056  DCHECK(type_index.Is(w0));
2057  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2058  DCHECK(current_method.Is(w1));
2059  codegen_->LoadCurrentMethod(current_method);
2060  __ Mov(type_index, instruction->GetTypeIndex());
2061  codegen_->InvokeRuntime(
2062      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
2063}
2064
2065void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2066  LocationSummary* locations =
2067      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2068  InvokeRuntimeCallingConvention calling_convention;
2069  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2070  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2071  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
2072}
2073
2074void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2075  LocationSummary* locations = instruction->GetLocations();
2076  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2077  DCHECK(type_index.Is(w0));
2078  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2079  DCHECK(current_method.Is(w1));
2080  codegen_->LoadCurrentMethod(current_method);
2081  __ Mov(type_index, instruction->GetTypeIndex());
2082  codegen_->InvokeRuntime(
2083      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
2084}
2085
2086void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2087  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2088  locations->SetInAt(0, Location::RequiresRegister());
2089  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2090}
2091
2092void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2093  switch (instruction->InputAt(0)->GetType()) {
2094    case Primitive::kPrimBoolean:
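      // Booleans are materialized as 0 or 1, so logical not is an XOR with 1.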
2095      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
2096      break;
2097
2098    case Primitive::kPrimInt:
2099    case Primitive::kPrimLong:
2100      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
2101      break;
2102
2103    default:
2104      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2105  }
2106}
2107
2108void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2109  LocationSummary* locations =
2110      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2111  locations->SetInAt(0, Location::RequiresRegister());
2112  if (instruction->HasUses()) {
2113    locations->SetOut(Location::SameAsFirstInput());
2114  }
2115}
2116
2117void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2118  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2119  codegen_->AddSlowPath(slow_path);
2120
2121  LocationSummary* locations = instruction->GetLocations();
2122  Location obj = locations->InAt(0);
2123  if (obj.IsRegister()) {
2124    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
2125  } else {
2126    DCHECK(obj.IsConstant()) << obj;
2127    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
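    // The object is a known null constant: always take the slow path.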
2128    __ B(slow_path->GetEntryLabel());
2129  }
2130}
2131
2132void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2133  HandleBinaryOp(instruction);
2134}
2135
2136void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2137  HandleBinaryOp(instruction);
2138}
2139
2140void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2141  LOG(FATAL) << "Unreachable";
2142}
2143
2144void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2145  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2146}
2147
2148void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2149  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2150  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
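  // Arguments passed on the stack live in the caller's frame; offset their
  // slots by the size of the current frame so they are addressed relative to
  // the callee's SP.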
2151  if (location.IsStackSlot()) {
2152    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2153  } else if (location.IsDoubleStackSlot()) {
2154    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2155  }
2156  locations->SetOut(location);
2157}
2158
2159void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2160  // Nothing to do, the parameter is already at its location.
2161  UNUSED(instruction);
2162}
2163
2164void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2165  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2166  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2167    locations->SetInAt(i, Location::Any());
2168  }
2169  locations->SetOut(Location::Any());
2170}
2171
2172void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
2173  UNUSED(instruction);
2174  LOG(FATAL) << "Unreachable";
2175}
2176
2177void LocationsBuilderARM64::VisitRem(HRem* rem) {
2178  LocationSummary* locations =
2179      new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
2180  switch (rem->GetResultType()) {
2181    case Primitive::kPrimInt:
2182    case Primitive::kPrimLong:
2183      locations->SetInAt(0, Location::RequiresRegister());
2184      locations->SetInAt(1, Location::RequiresRegister());
2185      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2186      break;
2187
2188    default:
2189      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
2190  }
2191}
2192
2193void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2194  Primitive::Type type = rem->GetResultType();
2195  switch (type) {
2196    case Primitive::kPrimInt:
2197    case Primitive::kPrimLong: {
2198      UseScratchRegisterScope temps(GetVIXLAssembler());
2199      Register dividend = InputRegisterAt(rem, 0);
2200      Register divisor = InputRegisterAt(rem, 1);
2201      Register output = OutputRegister(rem);
2202      Register temp = temps.AcquireSameSizeAs(output);
2203
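      // ARM64 has no integer remainder instruction: compute
      // temp = dividend / divisor, then output = dividend - temp * divisor.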
2204      __ Sdiv(temp, dividend, divisor);
2205      __ Msub(output, temp, divisor, dividend);
2206      break;
2207    }
2208
2209    default:
2210      LOG(FATAL) << "Unexpected rem type " << type;
2211  }
2212}
2213
2214void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
2215  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2216  Primitive::Type return_type = instruction->InputAt(0)->GetType();
2217  locations->SetInAt(0, ARM64ReturnLocation(return_type));
2218}
2219
2220void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
2221  UNUSED(instruction);
2222  codegen_->GenerateFrameExit();
2223  __ Ret();
2224}
2225
2226void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
2227  instruction->SetLocations(nullptr);
2228}
2229
2230void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
2231  UNUSED(instruction);
2232  codegen_->GenerateFrameExit();
2233  __ Ret();
2234}
2235
2236void LocationsBuilderARM64::VisitShl(HShl* shl) {
2237  HandleShift(shl);
2238}
2239
2240void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
2241  HandleShift(shl);
2242}
2243
2244void LocationsBuilderARM64::VisitShr(HShr* shr) {
2245  HandleShift(shr);
2246}
2247
2248void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
2249  HandleShift(shr);
2250}
2251
2252void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
2253  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
2254  Primitive::Type field_type = store->InputAt(1)->GetType();
2255  switch (field_type) {
2256    case Primitive::kPrimNot:
2257    case Primitive::kPrimBoolean:
2258    case Primitive::kPrimByte:
2259    case Primitive::kPrimChar:
2260    case Primitive::kPrimShort:
2261    case Primitive::kPrimInt:
2262    case Primitive::kPrimFloat:
2263      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
2264      break;
2265
2266    case Primitive::kPrimLong:
2267    case Primitive::kPrimDouble:
2268      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
2269      break;
2270
2271    default:
2272      LOG(FATAL) << "Unimplemented local type " << field_type;
2273  }
2274}
2275
2276void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
2277  UNUSED(store);
2278}
2279
2280void LocationsBuilderARM64::VisitSub(HSub* instruction) {
2281  HandleBinaryOp(instruction);
2282}
2283
2284void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
2285  HandleBinaryOp(instruction);
2286}
2287
2288void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2289  LocationSummary* locations =
2290      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2291  locations->SetInAt(0, Location::RequiresRegister());
2292  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2293}
2294
2295void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2296  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
2297  codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
2298}
2299
2300void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2301  LocationSummary* locations =
2302      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2303  locations->SetInAt(0, Location::RequiresRegister());
2304  locations->SetInAt(1, Location::RequiresRegister());
2305}
2306
2307void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2308  CPURegister value = InputCPURegisterAt(instruction, 1);
2309  Register cls = InputRegisterAt(instruction, 0);
2310  Offset offset = instruction->GetFieldOffset();
2311  Primitive::Type field_type = instruction->GetFieldType();
2312
2313  codegen_->Store(field_type, value, HeapOperand(cls, offset));
2314  if (field_type == Primitive::kPrimNot) {
2315    codegen_->MarkGCCard(cls, Register(value));
2316  }
2317}
2318
2319void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
2320  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2321}
2322
2323void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
2324  HBasicBlock* block = instruction->GetBlock();
2325  if (block->GetLoopInformation() != nullptr) {
2326    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2327    // The back edge will generate the suspend check.
2328    return;
2329  }
2330  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2331    // The goto will generate the suspend check.
2332    return;
2333  }
2334  GenerateSuspendCheck(instruction, nullptr);
2335}
2336
2337void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
2338  temp->SetLocations(nullptr);
2339}
2340
2341void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
2342  // Nothing to do, this is driven by the code generator.
2343  UNUSED(temp);
2344}
2345
2346void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
2347  LocationSummary* locations =
2348      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2349  InvokeRuntimeCallingConvention calling_convention;
2350  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2351}
2352
2353void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
2354  codegen_->InvokeRuntime(
2355      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
2356}
2357
2358void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
2359  LocationSummary* locations =
2360      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2361  Primitive::Type input_type = conversion->GetInputType();
2362  Primitive::Type result_type = conversion->GetResultType();
2363  DCHECK_NE(input_type, result_type);
2364  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
2365      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
2366    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
2367  }
2368
2369  if (IsFPType(input_type)) {
2370    locations->SetInAt(0, Location::RequiresFpuRegister());
2371  } else {
2372    locations->SetInAt(0, Location::RequiresRegister());
2373  }
2374
2375  if (IsFPType(result_type)) {
2376    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2377  } else {
2378    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2379  }
2380}
2381
2382void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
2383  Primitive::Type result_type = conversion->GetResultType();
2384  Primitive::Type input_type = conversion->GetInputType();
2385
2386  DCHECK_NE(input_type, result_type);
2387
2388  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
2389    int result_size = Primitive::ComponentSize(result_type);
2390    int input_size = Primitive::ComponentSize(input_type);
2391    int min_size = std::min(result_size, input_size);
2392    Register output = OutputRegister(conversion);
2393    Register source = InputRegisterAt(conversion, 0);
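    // Conversions to char, and widenings from char, zero-extend (Ubfx); all
    // other integral conversions sign-extend from the narrower size (Sbfx).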
2394    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
2395      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
2396    } else if ((result_type == Primitive::kPrimChar) ||
2397               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
2398      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
2399    } else {
2400      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
2401    }
2402  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
2403    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
2404  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
2405    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
2406    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
2407  } else if (IsFPType(result_type) && IsFPType(input_type)) {
2408    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
2409  } else {
2410    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
2411                << " to " << result_type;
2412  }
2413}
2414
2415void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
2416  HandleShift(ushr);
2417}
2418
2419void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
2420  HandleShift(ushr);
2421}
2422
2423void LocationsBuilderARM64::VisitXor(HXor* instruction) {
2424  HandleBinaryOp(instruction);
2425}
2426
2427void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
2428  HandleBinaryOp(instruction);
2429}
2430
2431#undef __
2432#undef QUICK_ENTRY_POINT
2433
2434}  // namespace arm64
2435}  // namespace art
2436