/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM slow path.  Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM for HInvokeVirtual).  So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

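// The java.lang.Float/Double raw-bits conversions (floatToRawIntBits,
// intBitsToFloat, doubleToRawLongBits, longBitsToDouble) are pure bit moves
// between the FP and core register files; the VMOV forms below copy the bits
// unchanged, with no numeric conversion.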
static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

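// Integer/Long.numberOfLeadingZeros maps to CLZ. For longs, CLZ the high word;
// if the high word is zero, the answer is 32 plus CLZ of the low word. E.g.
// for 0x0000000000002F00 the high word is zero, so the result is
// 32 + clz(0x00002F00) = 32 + 18 = 50.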
static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

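// Trailing zeros are counted as CLZ of the bit-reversed input (RBIT + CLZ),
// since ARM has no CTZ instruction. E.g. ctz(8) = clz(rbit(0x00000008)) =
// clz(0x10000000) = 3. For longs the low word is tested first; if it is zero,
// the count is 32 plus the trailing zeros of the high word.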
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

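// Math.abs for floats and doubles maps directly to VABS, which clears the
// sign bit; this also yields the results Math.abs requires for -0.0 (-> +0.0)
// and NaN (still NaN).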
static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

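// Branchless integer abs: mask = in >> 31 (all zeros or all ones), then
// abs = (in + mask) ^ mask. E.g. for in = -5: mask = -1, -5 + -1 = -6, and
// -6 ^ -1 = 5. In the 64-bit case the add is carried through the register
// pair with ADDS/ADC. Like Math.abs, this leaves Integer/Long.MIN_VALUE
// unchanged.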
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

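// Branchless min/max: compare once, then use an IT (if-then-else) block so
// that exactly one of the two predicated moves executes, selecting either
// op1 or op2 without a branch.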
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // The address may be unaligned. LDRD requires word alignment and is not guaranteed to work
  // for unaligned addresses even when SCTLR.A = 0 (which lets plain LDR accept them), so load
  // the two halves with separate LDR instructions.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
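  // If the low output register aliases the address register, load the high
  // word first so the base address is not clobbered before the second load.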
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // The address may be unaligned. STRD requires word alignment and is not guaranteed to work
  // for unaligned addresses even when SCTLR.A = 0 (which lets plain STR accept them), so store
  // the two halves with separate STR instructions.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateArrayLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

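    // A volatile 64-bit load must be single-copy atomic. On cores where LDRD
    // is not guaranteed atomic, fall back to LDREXD, whose 64-bit exclusive
    // load is; a plain LDRD suffices otherwise.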
    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

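// Unsafe.put* memory ordering: both volatile and ordered (lazySet) stores need
// a release fence (DMB ISH) before the store; only volatile stores also need a
// trailing fence to order the store before subsequent memory accesses.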
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

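// Generates a compare-and-swap loop using LDREX/STREX. STREX writes 0 to its
// status register on success and 1 if the exclusive store failed, in which
// case the loop retries. The DMB fences before and after the loop give the
// operation its expected full-barrier semantics.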
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not poison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value_lo);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

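  // out = (tmp_lo == 0) ? 1 : 0. RSBS computes out = 1 - tmp_lo; if the
  // subtraction borrows (tmp_lo >= 2, carry clear), the predicated MOV clamps
  // out to 0. For tmp_lo == 1 the result is already 0.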
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not unpoison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    }
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCas above).
  //
  // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));          // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);  // Unsigned higher-or-same also catches negative idx.

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));                 // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs go into the runtime calling convention registers, as the
  // pStringCompareTo entrypoint is invoked directly below; the result comes
  // back in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}
1111
1112static void GenerateVisitStringIndexOf(HInvoke* invoke,
1113                                       ArmAssembler* assembler,
1114                                       CodeGeneratorARM* codegen,
1115                                       ArenaAllocator* allocator,
1116                                       bool start_at_zero) {
1117  LocationSummary* locations = invoke->GetLocations();
1118  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
1119
1120  // Note that the null check must have been done earlier.
1121  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1122
1123  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1124  // or directly dispatch if we have a constant.
1125  SlowPathCode* slow_path = nullptr;
1126  if (invoke->InputAt(1)->IsIntConstant()) {
1127    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
1128        std::numeric_limits<uint16_t>::max()) {
1129      // Always needs the slow-path. We could directly dispatch to it, but this case should be
1130      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1131      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1132      codegen->AddSlowPath(slow_path);
1133      __ b(slow_path->GetEntryLabel());
1134      __ Bind(slow_path->GetExitLabel());
1135      return;
1136    }
1137  } else {
1138    Register char_reg = locations->InAt(1).AsRegister<Register>();
1139    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
1140    __ cmp(char_reg, ShifterOperand(tmp_reg));
1141    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1142    codegen->AddSlowPath(slow_path);
1143    __ b(slow_path->GetEntryLabel(), HI);
1144  }
1145
1146  if (start_at_zero) {
1147    DCHECK_EQ(tmp_reg, R2);
1148    // Start-index = 0.
1149    __ LoadImmediate(tmp_reg, 0);
1150  }
1151
1152  __ LoadFromOffset(kLoadWord, LR, TR,
1153                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
1154  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
1155  __ blx(LR);
1156
1157  if (slow_path != nullptr) {
1158    __ Bind(slow_path->GetExitLabel());
1159  }
1160}
1161
1162void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
1163  LocationSummary* locations = new (arena_) LocationSummary(invoke,
1164                                                            LocationSummary::kCall,
1165                                                            kIntrinsified);
1166  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1167  // best to align the inputs accordingly.
1168  InvokeRuntimeCallingConvention calling_convention;
1169  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1170  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1171  locations->SetOut(Location::RegisterLocation(R0));
1172
1173  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1174  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1175}
1176
1177void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
1178  GenerateVisitStringIndexOf(
1179      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
1180}
1181
1182void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
1183  LocationSummary* locations = new (arena_) LocationSummary(invoke,
1184                                                            LocationSummary::kCall,
1185                                                            kIntrinsified);
1186  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1187  // best to align the inputs accordingly.
1188  InvokeRuntimeCallingConvention calling_convention;
1189  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1190  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1191  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1192  locations->SetOut(Location::RegisterLocation(R0));
1193
1194  // Need a temp for slow-path codepoint compare.
1195  locations->AddTemp(Location::RequiresRegister());
1196}
1197
1198void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
1199  GenerateVisitStringIndexOf(
1200      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
1201}
1202
1203void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1204  LocationSummary* locations = new (arena_) LocationSummary(invoke,
1205                                                            LocationSummary::kCall,
1206                                                            kIntrinsified);
1207  InvokeRuntimeCallingConvention calling_convention;
1208  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1209  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1210  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1211  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1212  locations->SetOut(Location::RegisterLocation(R0));
1213}
1214
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
}

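// Pseudocode of the range checks emitted below (a sketch; `input_len` and
// `temp` are scratch registers, and the slow path is taken on failure):
//     if (pos < 0 || input->length < pos) goto slow_path;
//     if (input->length - pos < length) goto slow_path;
// When `length_is_input_length`, only `pos == 0` can succeed.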
static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}

// TODO: Implement read barriers in the SystemArrayCopy intrinsic.
// Note that this code path is not used (yet) because we do not
// intrinsify methods that can go into the IntrinsicSlowPathARM
// slow path.
void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same array, the forward (ascending-
  // address) copy below would clobber not-yet-read elements whenever
  // dest_pos > src_pos, so in that case we go to the slow path.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ cmp(src, ShifterOperand(dest));
        __ b(slow_path->GetEntryLabel(), EQ);
      }

      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ cmp(src, ShifterOperand(dest));
        __ b(&conditions_on_positions_validated, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ cmp(src, ShifterOperand(dest));
      __ b(&conditions_on_positions_validated, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.
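    // In pseudocode, the fast path continues when (a sketch; heap reference
    // poisoning is elided):
    //     src->klass_ == dest->klass_
    //     || dest->klass_->component_type_->super_class_ == null  // Object[]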
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp2->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->super_class_
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, we're comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    // /* HeapReference<Class> */ temp1 = src->klass_
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    // /* HeapReference<Class> */ temp3 = temp1->component_type_
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.
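  // With the 4-byte element size used below, these amount to (a sketch):
  //     temp1 = src  + data_offset + src_pos  * 4   // source base
  //     temp2 = dest + data_offset + dest_pos * 4   // destination base
  //     temp3 = temp1 + length * 4                  // source end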

  uint32_t element_size = sizeof(int32_t);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need to
  // poison/unpoison, nor do any read barrier as the next uses of the destination
  // array will do it.
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

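// The transcendental Math intrinsics below call into the C library through a
// quick entrypoint using the soft-float native ABI: a double travels in a core
// register pair and the result comes back the same way. Roughly (a sketch; the
// actual registers depend on allocation):
//     vmov r0, r1, d0      @ marshal the argument
//     blx  lr              @ call the entrypoint
//     vmov d0, r0, r1      @ fetch the result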
static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
}

static void GenFPToFPCall(HInvoke* invoke,
                          ArmAssembler* assembler,
                          CodeGeneratorARM* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

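// Integer.reverse maps directly onto the ARM rbit instruction, which mirrors
// all 32 bits of a register, e.g. rbit(0x00000001) == 0x80000000 and
// rbit(0x0000F000) == 0x000F0000.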
void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in  = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

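// Reversing a 64-bit value bit-reverses each 32-bit half and swaps the two
// halves: out_lo = rbit(in_hi), out_hi = rbit(in_lo).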
void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo  = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi  = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}

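// Integer.reverseBytes maps onto the rev instruction, which swaps the byte
// order of a word, e.g. rev(0x12345678) == 0x78563412; the long variant below
// additionally swaps the two words of the register pair.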
void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in  = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo  = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi  = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

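// Short.reverseBytes maps onto revsh, which byte-swaps the low halfword and
// sign-extends the result, e.g. revsh(0x00001234) == 0x00003412 and
// revsh(0x000012F4) == 0xFFFFF412.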
void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in  = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}

void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

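// Java-level equivalent of the copy emitted below (a sketch):
//     for (int i = srcBegin; i < srcEnd; ++i) {
//       dst[dstBegin + (i - srcBegin)] = src.value[i];
//     }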
void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register src_ptr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr_end = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();
  Register tmp = locations->GetTemp(3).AsRegister<Register>();

  // src range to copy.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  __ add(src_ptr_end, src_ptr, ShifterOperand(srcEnd, LSL, 1));
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // dst to be copied.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  // Do the copy.
  Label loop, done;
  __ Bind(&loop);
  __ cmp(src_ptr, ShifterOperand(src_ptr_end));
  __ b(&done, EQ);
  __ ldrh(tmp, Address(src_ptr, char_size, Address::PostIndex));
  __ strh(tmp, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&loop);
  __ Bind(&done);
}


UNIMPLEMENTED_INTRINSIC(ARM, IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, LongBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(ARM, DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art