intrinsics_arm.cc revision 611d3395e9efc0ab8dbfa4a197fa022fbd8c7204
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
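      // Move order matters: if the target low register aliases R1 (the high half of the
      // result), copy the high half first so it is not clobbered by the low move.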
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
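    // clz(hi) is the answer unless the high word is zero; in that case the answer is
    // 32 + clz(lo), so recompute on the low word and add 32.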
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

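  // Branchless abs: mask = in >> 31 (all ones if negative, zero otherwise), then
  // abs(in) = (in + mask) ^ mask. The 64-bit variant carries the add across the pair.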
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

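  // Thumb-2 IT (if-then) block: the two predicated movs below execute under opposite
  // conditions, so exactly one of them writes the result.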
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
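  // If the address register doubles as the low output, load the high word first so the
  // address is not clobbered before the second load.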
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
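      // Without an atomic ldrd, use ldrexd: it is single-copy atomic for the 64-bit
      // access; the exclusive monitor state it acquires is simply left unused.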
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register trg = locations->Out().AsRegister<Register>();
    __ MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

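      // Without an atomic strd, a 64-bit store is only single-copy atomic as a successful
      // strexd, so retry the ldrexd/strexd pair until the exclusive store succeeds
      // (strexd writes 0 to temp_lo on success).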
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

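  // ldrex/strex form an exclusive-monitor pair: strex performs the store only if the
  // monitor is still held, writing 0 to its result register on success and 1 on failure.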
  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

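  // out = (tmp_lo == 0): rsbs computes 1 - tmp_lo; if that subtraction borrows (tmp_lo > 1,
  // i.e. the values differed), the carry-clear predicated mov forces the result to 0.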
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register; unlike in Quick,
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
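  // Unsigned comparison: a negative index appears as a large unsigned value, so the single
  // CS branch below covers both idx < 0 and idx >= length.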
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));                 // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs go into the runtime calling convention registers; the result comes back in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm
}  // namespace art