intrinsics_arm64.cc revision 7b56502c52271c52ef0232ccd47e96badfe5dba6
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}
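// Note (background, not load-bearing): an Fmov between a general-purpose and
// an FP register, as in the two helpers above, is a raw bit copy. That is
// exactly the contract of Double.doubleToRawLongBits / Float.floatToRawIntBits
// and their inverses: no value conversion and no NaN canonicalization occurs.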
void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
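// Note (illustrative): the helper below computes the Java compare contract
// {-1, 0, +1} without branches. Worked example for op1 = 3, op2 = 5: Cmp sets
// the flags for "less than"; Cset(gt) therefore writes 0; Cinv(lt) then
// bitwise-inverts that 0 to 0xFFFFFFFF, i.e. -1 in a W register. For
// op1 > op2, Cset writes 1 and Cinv leaves it unchanged.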
static void GenCompare(LocationSummary* locations, bool is_long, vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Cset(out_reg, gt);           // out == +1 if GT or 0 otherwise
  __ Cinv(out_reg, out_reg, lt);  // out == -1 if LT or unchanged otherwise
}

void IntrinsicLocationsBuilderARM64::VisitIntegerCompare(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerCompare(HInvoke* invoke) {
  GenCompare(invoke->GetLocations(), /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongCompare(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongCompare(HInvoke* invoke) {
  GenCompare(invoke->GetLocations(), /* is_long */ true, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}
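// Note: A64 has no count-trailing-zeros instruction, so GenNumberOfTrailingZeros
// above uses the standard idiom CTZ(x) == CLZ(RBIT(x)). E.g. for x = 0b...1000,
// Rbit moves the lowest set bit to the top and Clz then yields 3. For x == 0,
// Rbit gives 0 and Clz returns the full register width (32 or 64), matching
// Integer/Long.numberOfTrailingZeros.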
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}
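// Note on GenAbsInteger above: Cmp against zero followed by Cneg(out, in, lt)
// negates only negative inputs. Negating Integer.MIN_VALUE (or Long.MIN_VALUE)
// wraps back to itself in two's complement, which is exactly the behavior
// Math.abs documents for those values, so no extra handling is needed.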
void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}
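// Note on the two min/max helpers above: a single Fmin/Fmax suffices because
// the A64 semantics line up with Java's Math.min/max, i.e. a NaN operand
// yields NaN and -0.0 is treated as strictly smaller than +0.0. The integer
// variant is a branchless compare-and-select (Cmp plus Csel on lt/gt).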
void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}
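// Note: Fcvtms converts floating point to a signed integer rounding toward
// minus infinity, so the Fadd + Fcvtms pair above computes floor(x + 0.5),
// which is how Java defines Math.round (hence the kRoundIsPlusPointFive guard
// in the builders below). Fcvtms also maps NaN to 0 and saturates on overflow,
// matching Math.round's NaN and clamping rules.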
void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}
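// Note: the Memory.peek*/poke* intrinsics above and below receive a raw native
// address as a Java long, which is why AbsoluteHeapOperandFrom uses the X
// (64-bit) view of the address register. Ldrsb/Ldrsh sign-extend the loaded
// byte/halfword, matching the signed Java byte and short return types.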
static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}
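// Note on the volatile paths in GenUnsafeGet below: when the CPU features
// prefer acquire/release, a volatile get is emitted as a single load-acquire
// (LDAR, via LoadAcquire); otherwise it is a plain load followed by
// Dmb(InnerShareable, BarrierReads), i.e. DMB ISHLD, which keeps the load
// ordered before subsequent memory accesses.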
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateArrayLoadWithBakerReadBarrier(
        invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
    if (is_volatile && !use_acquire_release) {
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
      if (use_acquire_release) {
        codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
      } else {
        codegen->Load(type, trg, mem_op);
        __ Dmb(InnerShareable, BarrierReads);
      }
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
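// Note: on acquire/release-preferring cores, StoreRelease above emits a single
// STLR, which provides the ordered-store semantics that the fallback path
// builds manually from a DMB ISH before the store (plus a trailing barrier for
// the volatile case).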
void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());       // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
    // the reference stored in the object before attempting the CAS,
    // similar to the one in the art::Unsafe_compareAndSwapObject JNI
    // implementation.
    //
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    // Emit a `Dmb(InnerShareable, BarrierAll)` (DMB ISH) instruction
    // instead of a `Dmb(InnerShareable, BarrierWrites)` (DMB ISHST)
    // one, as the latter allows a preceding load to be delayed past
    // the STXR instruction below.
    __ Dmb(InnerShareable, BarrierAll);
    __ Bind(&loop_head);
    // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
    // the reference stored in the object before attempting the CAS,
    // similar to the one in the art::Unsafe_compareAndSwapObject JNI
    // implementation.
    //
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS below).
  //
  // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(LongBitCount)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)

UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)

UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerSignum)
UNIMPLEMENTED_INTRINSIC(LongSignum)

// Rotate operations are handled as HRor instructions.
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art