intrinsics_arm64.cc revision 2d27c8e338af7262dbd4aaa66127bb8fa1758b86
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;   // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

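// The Memory.peek/poke intrinsics receive a raw 64-bit address in a long, so
// the operand is built from the full X register rather than a W heap reference.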
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
  if (invoke->GetNumberOfArguments() == 0) {
    // No argument to move.
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

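// An Fmov between a general-purpose and an FP register transfers the raw bit
// pattern unchanged, so a single instruction implements doubleToRawLongBits /
// longBitsToDouble (and the 32-bit float variants below).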
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
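      // Rev16 byte-swaps each halfword; the result must then be sign-extended
      // to yield a proper Java short in the W register.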
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

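  // Compare against zero, then conditionally negate: out = (in < 0) ? -in : in.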
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
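  // Fmin/Fmax match Java's Math.min/max semantics: a NaN operand produces NaN,
  // and -0.0 compares as less than +0.0, so a single instruction suffices.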
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

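  // Compare once, then select: lt picks op1 when it is the smaller value (min),
  // gt picks op1 when it is the larger value (max).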
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
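  // Math.round(x) is defined as floor(x + 0.5): Fcvtms converts with rounding
  // toward minus infinity, yields 0 for NaN, and saturates at the result type's
  // bounds, matching the Java semantics.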
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

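// The managed peer (java.lang.Thread) is cached in the native Thread object, so
// Thread.currentThread() reduces to a single load off the thread register (tr).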
void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
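  // A volatile read needs acquire semantics: either a load-acquire, or a plain
  // load followed by a load-any barrier (dmb ishld) when ldar is not preferred.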
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

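  // Ordered/volatile writes need release semantics: a store-release, or a full
  // barrier before the store when stlr is not preferred.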
  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        // A volatile store must also be ordered against subsequent loads; that
        // requires a full barrier (dmb ish). A load-any barrier (BarrierReads,
        // dmb ishld) would not order the preceding store at all.
        __ Dmb(InnerShareable, BarrierAll);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());                  // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));               // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));             // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);      // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);         // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                             // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);             // Value in memory.

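  // Stxr/Stlxr always write their status result to a 32-bit register,
  // regardless of the size of the data being stored.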
  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr];
  // } while (tmp_value == expected && failure([tmp_ptr] <- value));
  // result = (tmp_value == expected);

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

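  // Call the pStringCompareTo entrypoint through its slot in the Thread object;
  // the arguments are already in the runtime calling convention registers.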
  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  __ Blr(lr);
  // Record the PC after the call so the stack map corresponds to the return address.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // All inputs are passed in the runtime calling convention registers; no temps are needed.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm64
}  // namespace art