intrinsics_arm.cc revision 848f70a3d73833fc1bf3032a9ff6812e429661d9
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
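      // Move in an order that never overwrites a result register before it has
      // been read: if the target low register aliases R1, copy the high half first.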
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM* codegen) {
  if (invoke->InputCount() == 0) {
    // No argument to move.
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

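  // Branchless abs: mask = in >> 31 (all ones if negative, zero otherwise);
  // abs(in) = (in + mask) ^ mask.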
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

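  // Select the result with an IT block instead of a branch:
  // out = is_min ? ((op1 < op2) ? op1 : op2) : ((op1 > op2) ? op1 : op2).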
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // The address may be unaligned. ldrd requires a word-aligned address and faults otherwise,
  // even when unaligned-access support is enabled (SCTLR.A == 0), so load the two halves with
  // separate ldr instructions.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // The address may be unaligned. strd requires a word-aligned address and faults otherwise,
  // even when unaligned-access support is enabled (SCTLR.A == 0), so store the two halves with
  // separate str instructions.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
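      // ldrexd provides a single-copy atomic 64-bit load; the exclusive monitor
      // state it sets is never paired with a strexd and is simply abandoned.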
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

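  // A volatile load is mapped to load; dmb ish, giving it load-acquire semantics.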
  if (is_volatile) {
    __ dmb(ISH);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

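  // Both volatile and ordered stores need a leading dmb ish so that earlier writes
  // are visible before the store; volatile stores also get a trailing dmb below.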
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

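      // Without single-copy atomic ldrd/strd, emulate an atomic 64-bit store with
      // a ldrexd/strexd loop, retrying until the exclusive store succeeds.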
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen->MarkGCCard(temp, card, base, value);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

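  // out = (tmp_lo == 0) ? 1 : 0. rsbs computes 1 - tmp_lo and clears the carry
  // flag (borrow) when tmp_lo > 1; the conditional mov then zeroes out.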
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  // Offset of the character array data in the String object.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Offset of the count field holding the string length.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, it is probably not
  //       worth the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so unlike Quick we
  //       do not optimize the code for constant indices (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
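  // The unsigned comparison (CS == unsigned >=) rejects both idx >= length and
  // negative indices with a single branch.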
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));                 // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs and the output are in fixed registers per the runtime calling convention.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  __ blx(LR);
  // Record the PC info after the call so the stack map corresponds to the return address.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm
}  // namespace art