// intrinsics_arm64.cc, revision 579885a26d761f5ba9550f2a1cd7f0f598c2e1e3
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "a64/disasm-a64.h"
#include "a64/macro-assembler-a64.h"

using namespace vixl;   // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

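// Builds a MemOperand that addresses raw (absolute) memory through the pointer held in
// |location|. Used by the Memory.peek/poke intrinsics below, which take native addresses
// rather than managed heap references.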
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

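// Shorthand used by the helpers and slow path below: '__' expands to the VIXL macro assembler
// of the CodeGeneratorARM64 named 'codegen' in the enclosing scope.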
#define __ codegen->GetAssembler()->vixl_masm_->

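// Copies the call result from the ARM64 return register (core or FP, depending on |type|) into
// the intrinsic's output location after the slow path has called the managed implementation.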
static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type)) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

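// Moves the intrinsic's arguments from their current locations into the standard
// calling-convention locations expected by the managed implementation called from the slow path.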
static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
  if (invoke->InputCount() == 0) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    codegen->SaveLiveRegisters(invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    codegen->RestoreLiveRegisters(invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

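// Byte reversal. Ints and longs map onto a single Rev; for shorts, Rev16 swaps the two bytes of
// each halfword and the following Sxth sign-extends the low halfword, yielding a proper Java
// short in a W register.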
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

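// Branch-free absolute value: compare against zero, then Cneg negates the input when it was
// negative (condition lt). As with Math.abs, Integer.MIN_VALUE and Long.MIN_VALUE are returned
// unchanged.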
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

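// Math.min/Math.max on floats and doubles map onto single Fmin/Fmax instructions, whose handling
// of NaN inputs and of +0.0/-0.0 lines up with the Java semantics.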
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

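// Integer min/max without branches: compare the operands, then Csel picks the first one when it
// is the smaller (min) or larger (max) of the two.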
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

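// Math.round(x) is defined as floor(x + 0.5): add 0.5 to the input and convert with Fcvtms,
// which rounds toward minus infinity (floor).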
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

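// Thread.currentThread() is a single load: the managed peer (the java.lang.Thread object) is
// read from the native Thread at a fixed offset from the reserved thread register (tr).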
void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

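// sun.misc.Unsafe getters: a plain load for non-volatile accesses. Volatile accesses either use
// a load-acquire (when acquire/release instructions are preferred) or a plain load followed by
// a dmb ishld barrier.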
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

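// sun.misc.Unsafe setters. Ordered and volatile stores either use a store-release (when
// acquire/release instructions are preferred) or bracket a plain store with explicit dmb
// barriers. Reference stores additionally mark the GC card of the holder object.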
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        // A volatile store needs a full barrier afterwards so that it cannot be reordered with
        // subsequent loads.
        __ Dmb(InnerShareable, BarrierAll);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

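// compareAndSwap via an exclusive load/store loop: load the current value, give up if it differs
// from the expected one, otherwise attempt the store-exclusive and retry until it succeeds. The
// acquire/release variant uses Ldaxr/Stlxr; the fallback uses Ldxr/Stxr bracketed by explicit
// dmb barriers.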
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());                  // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));               // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));             // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);      // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);         // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                             // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);             // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr];
  // } while (tmp_value == expected && failure([tmp_ptr] <- value));
  // result = (tmp_value == expected);
890
891  vixl::Label loop_head, exit_loop;
892  if (use_acquire_release) {
893    __ Bind(&loop_head);
894    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
895    __ Cmp(tmp_value, expected);
896    __ B(&exit_loop, ne);
897    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
898    __ Cbnz(tmp_32, &loop_head);
899  } else {
900    __ Dmb(InnerShareable, BarrierWrites);
901    __ Bind(&loop_head);
902    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
903    __ Cmp(tmp_value, expected);
904    __ B(&exit_loop, ne);
905    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
906    __ Cbnz(tmp_32, &loop_head);
907    __ Dmb(InnerShareable, BarrierAll);
908  }
909  __ Bind(&exit_loop);
910  __ Cset(out, eq);
911}
912
913void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
914  CreateIntIntIntIntIntToInt(arena_, invoke);
915}
916void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
917  CreateIntIntIntIntIntToInt(arena_, invoke);
918}
919void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
920  CreateIntIntIntIntIntToInt(arena_, invoke);
921}
922
923void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
924  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
925}
926void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
927  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
928}
929void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
930  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
931}
932
933void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
934  LocationSummary* locations = new (arena_) LocationSummary(invoke,
935                                                            LocationSummary::kCallOnSlowPath,
936                                                            kIntrinsified);
937  locations->SetInAt(0, Location::RequiresRegister());
938  locations->SetInAt(1, Location::RequiresRegister());
939  // In case we need to go in the slow path, we can't have the output be the same
940  // as the input: the current liveness analysis considers the input to be live
941  // at the point of the call.
942  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
943}
944
945void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
946  vixl::MacroAssembler* masm = GetVIXLAssembler();
947  LocationSummary* locations = invoke->GetLocations();
948
949  // Location of reference to data array
950  const MemberOffset value_offset = mirror::String::ValueOffset();
951  // Location of count
952  const MemberOffset count_offset = mirror::String::CountOffset();
953  // Starting offset within data array
954  const MemberOffset offset_offset = mirror::String::OffsetOffset();
  // Start of char data within array_
  const MemberOffset data_offset = mirror::Array::DataOffset(sizeof(uint16_t));

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so, unlike Quick,
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  // Index computation.
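  // The sum below is accumulated in halfword (jchar) units: str.offset, the caller's index and
  // half of data_offset are added together; the final Ldrh then scales the sum by two through
  // its extended-register addressing mode (UXTW with a shift of 1).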
  __ Ldr(temp, HeapOperand(obj, offset_offset));         // temp := str.offset.
  __ Ldr(array_temp, HeapOperand(obj, value_offset));    // array_temp := str.value.
  __ Add(temp, temp, idx);
  DCHECK_EQ(data_offset.Int32Value() % 2, 0);            // We'll compensate by shifting.
  __ Add(temp, temp, Operand(data_offset.Int32Value() / 2));

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), temp, UXTW, 1));  // out := array_temp[temp].

  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

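// The macro below expands to empty visitor bodies. Since the locations builder never creates an
// intrinsified LocationSummary for these, TryDispatch() reports false and the compiler falls
// back to the regular invoke path.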
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyway; inlining should
UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)

}  // namespace arm64
}  // namespace art