intrinsics_arm.cc revision 391b866ce55b8e78b1f9a6b98321d837256e8d66
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM slow path.  Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM for HInvokeVirtual).  So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}
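
// These raw bit moves back the Float/Double (raw)bits conversions wired up
// below: a single VMOV copies an S register to/from a core register, or a D
// register to/from a core register pair, leaving the bit pattern untouched.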

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
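    // Sketch of the 64-bit case: out = (hi != 0) ? clz(hi) : 32 + clz(lo).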
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
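    // rbit reverses the bit order, so clz(rbit(x)) computes ctz(x).
    // 64-bit sketch: out = (lo != 0) ? ctz(lo) : 32 + ctz(hi).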
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

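  // Branchless abs: mask = in >> 31 (all ones if negative, zero otherwise),
  // so abs(in) = (in + mask) ^ mask.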
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

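  // ITE block: the first (predicated) mov executes when op1 is the result
  // (LT for min, GT for max), the second one otherwise, so out becomes
  // min/max(op1, op2) without a branch.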
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: the address is unaligned. An unaligned LDRD always throws a processor
  // exception (regardless of the SCTLR.A control bit), so load the two words with
  // separate LDRs, which support unaligned accesses when SCTLR.A = 0.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
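  // If addr aliases the low half of the result, load the high word first so
  // the base register is not clobbered before the second load.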
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: the address is unaligned. An unaligned STRD always throws a processor
  // exception (regardless of the SCTLR.A control bit), so store the two words with
  // separate STRs, which support unaligned accesses when SCTLR.A = 0.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  if (type == Primitive::kPrimLong) {
    Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
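      // LDREXD is single-copy atomic for the 64-bit access even on cores
      // where a plain LDRD is not; the exclusive monitor state it sets up is
      // simply discarded.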
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = trg_loc.AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MaybeGenerateReadBarrier(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
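      // Exclusive-store loop, roughly:
      //   do { ldrexd(temp, [IP]); } while (strexd(value, [IP]) != 0);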
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);  // 1 on success, 0 on a value mismatch.

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

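  // out = (tmp_lo == 0), i.e. whether the store succeeded: RSBS computes
  // 1 - tmp_lo, which is 1 exactly on success and 0 when tmp_lo == 1; any
  // other value borrows (carry clear) and the predicated MOV forces out to 0.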
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCas above).
  //
  // Also, the UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it
  // off temporarily as a quick fix (b/26204023).
  //
  // TODO(rpl): Fix these two issues and re-enable this intrinsic.
  if (kEmitCompilerReadBarrier || kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register; unlike Quick, we do
  //       not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));                 // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // Inputs follow the runtime calling convention; the result comes back in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // The cbz instruction requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
}

static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}

// TODO: Implement read barriers in the SystemArrayCopy intrinsic.
// Note that this code path is not used (yet) when read barriers are
// enabled, because we then do not intrinsify methods that can go into
// the IntrinsicSlowPathARM slow path.
void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

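  // Every check below branches to this slow path on failure; the slow path
  // simply performs the original, non-intrinsified call to System.arraycopy.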
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label ok;
  SystemArrayCopyOptimizations optimizations(invoke);

  if (!optimizations.GetDestinationIsSource()) {
    if (!src_pos.IsConstant() || !dest_pos.IsConstant()) {
      __ cmp(src, ShifterOperand(dest));
    }
  }

  // If source and destination are the same array, we go to the slow path
  // when dest_pos > src_pos: the simple forward copy emitted below would
  // otherwise overwrite source elements before reading them. For example,
  // forward-copying two elements of {1, 2, 3} from position 0 to position 1
  // would yield {1, 1, 1} instead of {1, 1, 2}.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ b(&ok, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ b(&ok, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }

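  // Reaching `ok` means either the arrays are distinct or the positions
  // permit a forward copy.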
  __ Bind(&ok);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // The constant case has already been checked in the LocationsBuilder.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If neither check succeeds, we go to the
    // slow path.
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array.
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array.
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

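    // Compare the destination class with the source class; identical
    // classes make the copy trivially type-safe.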
    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, as we are comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.

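  // Heap references are 32-bit here, so each element occupies 4 bytes. For
  // example, with src_pos == 2 the source base below works out to
  // src + DataOffset + 2 * 4.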
  uint32_t element_size = sizeof(int32_t);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need
  // to poison/unpoison the references, nor to emit read barriers, as the
  // next uses of the destination array will do so.
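  // The emitted copy loop is roughly:
  //       cmp  temp1, temp3        ; anything to copy?
  //       beq  done
  // loop: ldr  ip, [temp1], #4     ; load element, advance source cursor
  //       str  ip, [temp2], #4     ; store element, advance destination cursor
  //       cmp  temp1, temp3
  //       bne  loop
  // done: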
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
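  // `dest` was null-checked above (or is statically known non-null), hence
  // /* can_be_null */ false; temp1 and temp2 serve as scratch registers for
  // the card address computation.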
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

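// These empty visitors leave the invoke without an intrinsic
// LocationSummary, so it is compiled as a regular method call instead.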
#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm
}  // namespace art