/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;   // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->
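// Within this scope, `__` emits code through the code generator's VIXL macro assembler.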

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type)) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
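    // kDiscardForSameWReg lets VIXL drop the move even when source and target
    // are the same W register (such a move is otherwise kept, since it zeroes
    // the upper 32 bits of the X register).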
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
  if (invoke->InputCount() == 0) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
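      // Rev16 reverses the bytes within each halfword; the reversed short is
      // in the low 16 bits, so sign-extend afterwards to preserve Java's
      // signed short semantics.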
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

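  // Branch-free abs(x): compare against zero, then conditionally negate when
  // the input is negative.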
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
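  // Fmin/Fmax match the Java semantics of Math.min/max: a NaN input propagates
  // to the result, and -0.0 compares as strictly less than +0.0.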
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

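  // Branch-free min/max: compare once, then conditionally select the
  // appropriate operand.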
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

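  // Round as floor(x + 0.5): add 0.5, then convert with Fcvtms (round toward
  // minus infinity). Note that the addition itself rounds, so the result can
  // differ from an exact floor(x + 0.5) for inputs just below a tie.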
551  if (is_double) {
552    __ Fmov(temp1_reg, static_cast<double>(0.5));
553  } else {
554    __ Fmov(temp1_reg, static_cast<float>(0.5));
555  }
556  __ Fadd(temp1_reg, in_reg, temp1_reg);
557  __ Fcvtms(out_reg, temp1_reg);
558}
559
560void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
561  CreateFPToIntPlusTempLocations(arena_, invoke);
562}
563
564void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
565  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
566}
567
568void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
569  CreateFPToIntPlusTempLocations(arena_, invoke);
570}
571
572void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
573  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
574}
575
576void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
577  CreateIntToIntLocations(arena_, invoke);
578}
579
580void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
581  vixl::MacroAssembler* masm = GetVIXLAssembler();
582  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
583          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
584}
585
586void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
587  CreateIntToIntLocations(arena_, invoke);
588}
589
590void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
591  vixl::MacroAssembler* masm = GetVIXLAssembler();
592  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
593         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
594}
595
596void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
597  CreateIntToIntLocations(arena_, invoke);
598}
599
600void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
601  vixl::MacroAssembler* masm = GetVIXLAssembler();
602  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
603         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
604}
605
606void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
607  CreateIntToIntLocations(arena_, invoke);
608}
609
610void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
611  vixl::MacroAssembler* masm = GetVIXLAssembler();
612  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
613           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
614}
615
616static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
617  LocationSummary* locations = new (arena) LocationSummary(invoke,
618                                                           LocationSummary::kNoCall,
619                                                           kIntrinsified);
620  locations->SetInAt(0, Location::RequiresRegister());
621  locations->SetInAt(1, Location::RequiresRegister());
622}
623
624void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
625  CreateIntIntToVoidLocations(arena_, invoke);
626}
627
628void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
629  vixl::MacroAssembler* masm = GetVIXLAssembler();
630  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
631          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
632}
633
634void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
635  CreateIntIntToVoidLocations(arena_, invoke);
636}
637
638void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
639  vixl::MacroAssembler* masm = GetVIXLAssembler();
640  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
641         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
642}
643
644void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
645  CreateIntIntToVoidLocations(arena_, invoke);
646}
647
648void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
649  vixl::MacroAssembler* masm = GetVIXLAssembler();
650  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
651         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
652}
653
654void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
655  CreateIntIntToVoidLocations(arena_, invoke);
656}
657
658void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
659  vixl::MacroAssembler* masm = GetVIXLAssembler();
660  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
661          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
662}
663
664void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
665  LocationSummary* locations = new (arena_) LocationSummary(invoke,
666                                                            LocationSummary::kNoCall,
667                                                            kIntrinsified);
668  locations->SetOut(Location::RequiresRegister());
669}
670
671void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
672  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
673                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
674}
675
676static void GenUnsafeGet(HInvoke* invoke,
677                         Primitive::Type type,
678                         bool is_volatile,
679                         CodeGeneratorARM64* codegen) {
680  LocationSummary* locations = invoke->GetLocations();
681  DCHECK((type == Primitive::kPrimInt) ||
682         (type == Primitive::kPrimLong) ||
683         (type == Primitive::kPrimNot));
684  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
685  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
686  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
687  Register trg = RegisterFrom(locations->Out(), type);
688  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
689
690  MemOperand mem_op(base.X(), offset);
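  // A volatile get needs load-acquire semantics: either an acquiring load, or
  // a plain load followed by a load-any barrier (dmb ishld).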
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        // A volatile store must be followed by a StoreLoad barrier; that needs
        // a full barrier here, since BarrierReads (dmb ishld) would not order
        // the store against subsequent loads.
        __ Dmb(InnerShareable, BarrierAll);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());                  // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));               // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));             // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);      // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);         // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                             // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);             // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr];
  // } while (tmp_value == expected && failure([tmp_ptr] <- value));
  // result = (tmp_value == expected);

  vixl::Label loop_head, exit_loop;
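  // Exclusive-monitor loop: the store-exclusive writes 1 to tmp_32 if the
  // exclusive monitor was lost between the load and the store, in which case
  // we retry.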
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();
  // Starting offset within data array
  const MemberOffset offset_offset = mirror::String::OffsetOffset();
  // Start of char data within array_
  const MemberOffset data_offset = mirror::Array::DataOffset(sizeof(uint16_t));

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
967
968  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
969  //       the cost.
970  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
971  //       we will not optimize the code for constants (which would save a register).
972

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  // Index computation.
  __ Ldr(temp, HeapOperand(obj, offset_offset));         // temp := str.offset.
984  __ Add(temp, temp, idx);
985  DCHECK_EQ(data_offset.Int32Value() % 2, 0);            // We'll compensate by shifting.
986  __ Add(temp, temp, Operand(data_offset.Int32Value() / 2));
987

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), temp, UXTW, 1));  // out := array_temp[temp].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheck());

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());
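  // A null argument takes the slow path, which re-dispatches to the managed
  // String.compareTo and lets it throw the NullPointerException.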

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)

}  // namespace arm64
}  // namespace art