code_generator_arm64.cc revision 9aec02fc5df5518c16f1e5a9b6cb198a192db973
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

// TODO: clean-up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {

// Returns true if `type` is a floating-point primitive type (float or double).
bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

// Returns true for the integral primitive types byte/char/short/int/long.
// Note: kPrimBoolean intentionally falls into the default and returns false.
bool IsIntegralType(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      return true;
    default:
      return false;
  }
}

// Returns true if `type` occupies 64 bits (long or double); used to pick
// X vs W (and D vs S) register widths and single vs double stack slots.
bool Is64BitType(Primitive::Type type) {
  return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
}

// Convenience helpers to ease conversion to and from VIXL operands.
69static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32), 70 "Unexpected values for register codes."); 71 72int VIXLRegCodeFromART(int code) { 73 if (code == SP) { 74 return vixl::kSPRegInternalCode; 75 } 76 if (code == XZR) { 77 return vixl::kZeroRegCode; 78 } 79 return code; 80} 81 82int ARTRegCodeFromVIXL(int code) { 83 if (code == vixl::kSPRegInternalCode) { 84 return SP; 85 } 86 if (code == vixl::kZeroRegCode) { 87 return XZR; 88 } 89 return code; 90} 91 92Register XRegisterFrom(Location location) { 93 return Register::XRegFromCode(VIXLRegCodeFromART(location.reg())); 94} 95 96Register WRegisterFrom(Location location) { 97 return Register::WRegFromCode(VIXLRegCodeFromART(location.reg())); 98} 99 100Register RegisterFrom(Location location, Primitive::Type type) { 101 DCHECK(type != Primitive::kPrimVoid && !IsFPType(type)); 102 return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location); 103} 104 105Register OutputRegister(HInstruction* instr) { 106 return RegisterFrom(instr->GetLocations()->Out(), instr->GetType()); 107} 108 109Register InputRegisterAt(HInstruction* instr, int input_index) { 110 return RegisterFrom(instr->GetLocations()->InAt(input_index), 111 instr->InputAt(input_index)->GetType()); 112} 113 114FPRegister DRegisterFrom(Location location) { 115 return FPRegister::DRegFromCode(location.reg()); 116} 117 118FPRegister SRegisterFrom(Location location) { 119 return FPRegister::SRegFromCode(location.reg()); 120} 121 122FPRegister FPRegisterFrom(Location location, Primitive::Type type) { 123 DCHECK(IsFPType(type)); 124 return type == Primitive::kPrimDouble ? 
DRegisterFrom(location) : SRegisterFrom(location); 125} 126 127FPRegister OutputFPRegister(HInstruction* instr) { 128 return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType()); 129} 130 131FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) { 132 return FPRegisterFrom(instr->GetLocations()->InAt(input_index), 133 instr->InputAt(input_index)->GetType()); 134} 135 136CPURegister OutputCPURegister(HInstruction* instr) { 137 return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr)) 138 : static_cast<CPURegister>(OutputRegister(instr)); 139} 140 141CPURegister InputCPURegisterAt(HInstruction* instr, int index) { 142 return IsFPType(instr->InputAt(index)->GetType()) 143 ? static_cast<CPURegister>(InputFPRegisterAt(instr, index)) 144 : static_cast<CPURegister>(InputRegisterAt(instr, index)); 145} 146 147int64_t Int64ConstantFrom(Location location) { 148 HConstant* instr = location.GetConstant(); 149 return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue() 150 : instr->AsLongConstant()->GetValue(); 151} 152 153Operand OperandFrom(Location location, Primitive::Type type) { 154 if (location.IsRegister()) { 155 return Operand(RegisterFrom(location, type)); 156 } else { 157 return Operand(Int64ConstantFrom(location)); 158 } 159} 160 161Operand InputOperandAt(HInstruction* instr, int input_index) { 162 return OperandFrom(instr->GetLocations()->InAt(input_index), 163 instr->InputAt(input_index)->GetType()); 164} 165 166MemOperand StackOperandFrom(Location location) { 167 return MemOperand(sp, location.GetStackIndex()); 168} 169 170MemOperand HeapOperand(const Register& base, size_t offset) { 171 // A heap reference must be 32bit, so fit in a W register. 
172 DCHECK(base.IsW()); 173 return MemOperand(base.X(), offset); 174} 175 176MemOperand HeapOperand(const Register& base, Offset offset) { 177 return HeapOperand(base, offset.SizeValue()); 178} 179 180MemOperand HeapOperandFrom(Location location, Offset offset) { 181 return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset); 182} 183 184Location LocationFrom(const Register& reg) { 185 return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code())); 186} 187 188Location LocationFrom(const FPRegister& fpreg) { 189 return Location::FpuRegisterLocation(fpreg.code()); 190} 191 192} // namespace 193 194inline Condition ARM64Condition(IfCondition cond) { 195 switch (cond) { 196 case kCondEQ: return eq; 197 case kCondNE: return ne; 198 case kCondLT: return lt; 199 case kCondLE: return le; 200 case kCondGT: return gt; 201 case kCondGE: return ge; 202 default: 203 LOG(FATAL) << "Unknown if condition"; 204 } 205 return nv; // Unreachable. 206} 207 208Location ARM64ReturnLocation(Primitive::Type return_type) { 209 DCHECK_NE(return_type, Primitive::kPrimVoid); 210 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the 211 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`, 212 // but we use the exact registers for clarity. 
213 if (return_type == Primitive::kPrimFloat) { 214 return LocationFrom(s0); 215 } else if (return_type == Primitive::kPrimDouble) { 216 return LocationFrom(d0); 217 } else if (return_type == Primitive::kPrimLong) { 218 return LocationFrom(x0); 219 } else { 220 return LocationFrom(w0); 221 } 222} 223 224static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 }; 225static constexpr size_t kRuntimeParameterCoreRegistersLength = 226 arraysize(kRuntimeParameterCoreRegisters); 227static const FPRegister kRuntimeParameterFpuRegisters[] = { }; 228static constexpr size_t kRuntimeParameterFpuRegistersLength = 0; 229 230class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> { 231 public: 232 static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters); 233 234 InvokeRuntimeCallingConvention() 235 : CallingConvention(kRuntimeParameterCoreRegisters, 236 kRuntimeParameterCoreRegistersLength, 237 kRuntimeParameterFpuRegisters, 238 kRuntimeParameterFpuRegistersLength) {} 239 240 Location GetReturnLocation(Primitive::Type return_type); 241 242 private: 243 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention); 244}; 245 246Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) { 247 return ARM64ReturnLocation(return_type); 248} 249 250#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> 251#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value() 252 253class SlowPathCodeARM64 : public SlowPathCode { 254 public: 255 SlowPathCodeARM64() : entry_label_(), exit_label_() {} 256 257 vixl::Label* GetEntryLabel() { return &entry_label_; } 258 vixl::Label* GetExitLabel() { return &exit_label_; } 259 260 private: 261 vixl::Label entry_label_; 262 vixl::Label exit_label_; 263 264 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64); 265}; 266 267class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 { 268 public: 269 
BoundsCheckSlowPathARM64() {} 270 271 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 272 __ Bind(GetEntryLabel()); 273 __ Brk(__LINE__); // TODO: Unimplemented BoundsCheckSlowPathARM64. 274 } 275 276 private: 277 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64); 278}; 279 280class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 { 281 public: 282 explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {} 283 284 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 285 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); 286 __ Bind(GetEntryLabel()); 287 arm64_codegen->InvokeRuntime( 288 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc()); 289 } 290 291 private: 292 HDivZeroCheck* const instruction_; 293 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64); 294}; 295 296class LoadClassSlowPathARM64 : public SlowPathCodeARM64 { 297 public: 298 LoadClassSlowPathARM64(HLoadClass* cls, 299 HInstruction* at, 300 uint32_t dex_pc, 301 bool do_clinit) 302 : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) { 303 DCHECK(at->IsLoadClass() || at->IsClinitCheck()); 304 } 305 306 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 307 LocationSummary* locations = at_->GetLocations(); 308 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); 309 310 __ Bind(GetEntryLabel()); 311 codegen->SaveLiveRegisters(locations); 312 313 InvokeRuntimeCallingConvention calling_convention; 314 __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex()); 315 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W()); 316 int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage) 317 : QUICK_ENTRY_POINT(pInitializeType); 318 arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_); 319 320 // Move the class to the desired location. 
321 Location out = locations->Out(); 322 if (out.IsValid()) { 323 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg())); 324 Primitive::Type type = at_->GetType(); 325 arm64_codegen->MoveHelper(out, calling_convention.GetReturnLocation(type), type); 326 } 327 328 codegen->RestoreLiveRegisters(locations); 329 __ B(GetExitLabel()); 330 } 331 332 private: 333 // The class this slow path will load. 334 HLoadClass* const cls_; 335 336 // The instruction where this slow path is happening. 337 // (Might be the load class or an initialization check). 338 HInstruction* const at_; 339 340 // The dex PC of `at_`. 341 const uint32_t dex_pc_; 342 343 // Whether to initialize the class. 344 const bool do_clinit_; 345 346 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64); 347}; 348 349class LoadStringSlowPathARM64 : public SlowPathCodeARM64 { 350 public: 351 explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {} 352 353 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 354 LocationSummary* locations = instruction_->GetLocations(); 355 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); 356 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); 357 358 __ Bind(GetEntryLabel()); 359 codegen->SaveLiveRegisters(locations); 360 361 InvokeRuntimeCallingConvention calling_convention; 362 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0).W()); 363 __ Mov(calling_convention.GetRegisterAt(1).W(), instruction_->GetStringIndex()); 364 arm64_codegen->InvokeRuntime( 365 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc()); 366 Primitive::Type type = instruction_->GetType(); 367 arm64_codegen->MoveHelper(locations->Out(), calling_convention.GetReturnLocation(type), type); 368 369 codegen->RestoreLiveRegisters(locations); 370 __ B(GetExitLabel()); 371 } 372 373 private: 374 HLoadString* const instruction_; 375 376 
DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64); 377}; 378 379class NullCheckSlowPathARM64 : public SlowPathCodeARM64 { 380 public: 381 explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {} 382 383 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 384 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); 385 __ Bind(GetEntryLabel()); 386 arm64_codegen->InvokeRuntime( 387 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc()); 388 } 389 390 private: 391 HNullCheck* const instruction_; 392 393 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64); 394}; 395 396class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 { 397 public: 398 explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction, 399 HBasicBlock* successor) 400 : instruction_(instruction), successor_(successor) {} 401 402 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 403 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); 404 __ Bind(GetEntryLabel()); 405 codegen->SaveLiveRegisters(instruction_->GetLocations()); 406 arm64_codegen->InvokeRuntime( 407 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc()); 408 codegen->RestoreLiveRegisters(instruction_->GetLocations()); 409 if (successor_ == nullptr) { 410 __ B(GetReturnLabel()); 411 } else { 412 __ B(arm64_codegen->GetLabelOf(successor_)); 413 } 414 } 415 416 vixl::Label* GetReturnLabel() { 417 DCHECK(successor_ == nullptr); 418 return &return_label_; 419 } 420 421 422 private: 423 HSuspendCheck* const instruction_; 424 // If not null, the block to branch to after the suspend check. 425 HBasicBlock* const successor_; 426 427 // If `successor_` is null, the label to branch to after the suspend check. 
428 vixl::Label return_label_; 429 430 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64); 431}; 432 433class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 { 434 public: 435 TypeCheckSlowPathARM64() {} 436 437 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 438 __ Bind(GetEntryLabel()); 439 __ Brk(__LINE__); // TODO: Unimplemented TypeCheckSlowPathARM64. 440 __ b(GetExitLabel()); 441 } 442 443 private: 444 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64); 445}; 446 447#undef __ 448 449Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) { 450 Location next_location; 451 if (type == Primitive::kPrimVoid) { 452 LOG(FATAL) << "Unreachable type " << type; 453 } 454 455 if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) { 456 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++)); 457 } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) { 458 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++)); 459 } else { 460 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_); 461 next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset) 462 : Location::StackSlot(stack_offset); 463 } 464 465 // Space on the stack is reserved for all arguments. 466 stack_index_ += Is64BitType(type) ? 2 : 1; 467 return next_location; 468} 469 470CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph) 471 : CodeGenerator(graph, 472 kNumberOfAllocatableRegisters, 473 kNumberOfAllocatableFPRegisters, 474 kNumberOfAllocatableRegisterPairs), 475 block_labels_(nullptr), 476 location_builder_(graph, this), 477 instruction_visitor_(graph, this) {} 478 479#undef __ 480#define __ GetVIXLAssembler()-> 481 482void CodeGeneratorARM64::GenerateFrameEntry() { 483 // TODO: Add proper support for the stack overflow check. 
  // Stack-overflow probe: load from an address `reserved bytes` below sp; a
  // fault here is caught by the runtime. PC info is recorded so the fault can
  // be attributed to the method entry.
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireX();
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  // Reserve the whole frame and store the current method (held in w0 on
  // entry) at sp[0] with a single pre-indexed store, then spill the
  // preserved registers at the top of the frame.
  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : current method.
}

void CodeGeneratorARM64::GenerateFrameExit() {
  // Mirror of GenerateFrameEntry: reload the preserved registers from the top
  // of the frame, then pop the whole frame.
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

// Materializes the value produced by `instruction` into `location`, on
// behalf of the consuming instruction `move_for`.
void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    // The value already lives in the requested location; nothing to emit.
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ?
instruction->AsIntConstant()->GetValue() 529 : instruction->AsLongConstant()->GetValue(); 530 if (location.IsRegister()) { 531 Register dst = RegisterFrom(location, type); 532 DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) || 533 (instruction->IsLongConstant() && dst.Is64Bits())); 534 __ Mov(dst, value); 535 } else { 536 DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot()); 537 UseScratchRegisterScope temps(GetVIXLAssembler()); 538 Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX(); 539 __ Mov(temp, value); 540 __ Str(temp, StackOperandFrom(location)); 541 } 542 } else if (instruction->IsTemporary()) { 543 Location temp_location = GetTemporaryLocation(instruction->AsTemporary()); 544 MoveHelper(location, temp_location, type); 545 } else if (instruction->IsLoadLocal()) { 546 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal()); 547 if (Is64BitType(type)) { 548 MoveHelper(location, Location::DoubleStackSlot(stack_slot), type); 549 } else { 550 MoveHelper(location, Location::StackSlot(stack_slot), type); 551 } 552 553 } else { 554 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary()); 555 MoveHelper(location, locations->Out(), type); 556 } 557} 558 559size_t CodeGeneratorARM64::FrameEntrySpillSize() const { 560 return GetFramePreservedRegistersSize(); 561} 562 563Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const { 564 Primitive::Type type = load->GetType(); 565 566 switch (type) { 567 case Primitive::kPrimNot: 568 case Primitive::kPrimInt: 569 case Primitive::kPrimFloat: 570 return Location::StackSlot(GetStackSlot(load->GetLocal())); 571 572 case Primitive::kPrimLong: 573 case Primitive::kPrimDouble: 574 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal())); 575 576 case Primitive::kPrimBoolean: 577 case Primitive::kPrimByte: 578 case Primitive::kPrimChar: 579 case Primitive::kPrimShort: 580 case Primitive::kPrimVoid: 581 LOG(FATAL) 
<< "Unexpected type " << type; 582 } 583 584 LOG(FATAL) << "Unreachable"; 585 return Location::NoLocation(); 586} 587 588void CodeGeneratorARM64::MarkGCCard(Register object, Register value) { 589 UseScratchRegisterScope temps(GetVIXLAssembler()); 590 Register card = temps.AcquireX(); 591 Register temp = temps.AcquireX(); 592 vixl::Label done; 593 __ Cbz(value, &done); 594 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value())); 595 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); 596 __ Strb(card, MemOperand(card, temp)); 597 __ Bind(&done); 598} 599 600void CodeGeneratorARM64::SetupBlockedRegisters() const { 601 // Block reserved registers: 602 // ip0 (VIXL temporary) 603 // ip1 (VIXL temporary) 604 // xSuspend (Suspend counter) 605 // lr 606 // sp is not part of the allocatable registers, so we don't need to block it. 607 // TODO: Avoid blocking callee-saved registers, and instead preserve them 608 // where necessary. 609 CPURegList reserved_core_registers = vixl_reserved_core_registers; 610 reserved_core_registers.Combine(runtime_reserved_core_registers); 611 reserved_core_registers.Combine(quick_callee_saved_registers); 612 while (!reserved_core_registers.IsEmpty()) { 613 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true; 614 } 615 CPURegList reserved_fp_registers = vixl_reserved_fp_registers; 616 reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP()); 617 while (!reserved_core_registers.IsEmpty()) { 618 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true; 619 } 620} 621 622Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const { 623 if (type == Primitive::kPrimVoid) { 624 LOG(FATAL) << "Unreachable type " << type; 625 } 626 627 if (IsFPType(type)) { 628 ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters); 629 DCHECK_NE(reg, -1); 630 return Location::FpuRegisterLocation(reg); 631 } else { 632 ssize_t reg 
= FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters); 633 DCHECK_NE(reg, -1); 634 return Location::RegisterLocation(reg); 635 } 636} 637 638void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const { 639 stream << Arm64ManagedRegister::FromXRegister(XRegister(reg)); 640} 641 642void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const { 643 stream << Arm64ManagedRegister::FromDRegister(DRegister(reg)); 644} 645 646void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) { 647 if (constant->IsIntConstant() || constant->IsLongConstant()) { 648 __ Mov(Register(destination), 649 constant->IsIntConstant() ? constant->AsIntConstant()->GetValue() 650 : constant->AsLongConstant()->GetValue()); 651 } else if (constant->IsFloatConstant()) { 652 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue()); 653 } else { 654 DCHECK(constant->IsDoubleConstant()); 655 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue()); 656 } 657} 658 659void CodeGeneratorARM64::MoveHelper(Location destination, 660 Location source, 661 Primitive::Type type) { 662 if (source.Equals(destination)) { 663 return; 664 } 665 if (destination.IsRegister()) { 666 Register dst = RegisterFrom(destination, type); 667 if (source.IsStackSlot() || source.IsDoubleStackSlot()) { 668 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot()); 669 __ Ldr(dst, StackOperandFrom(source)); 670 } else { 671 __ Mov(dst, OperandFrom(source, type)); 672 } 673 } else if (destination.IsFpuRegister()) { 674 FPRegister dst = FPRegisterFrom(destination, type); 675 if (source.IsStackSlot() || source.IsDoubleStackSlot()) { 676 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot()); 677 __ Ldr(dst, StackOperandFrom(source)); 678 } else if (source.IsFpuRegister()) { 679 __ Fmov(dst, FPRegisterFrom(source, type)); 680 } else { 681 MoveConstant(dst, source.GetConstant()); 682 } 683 } else { 684 
DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot()); 685 if (source.IsRegister()) { 686 __ Str(RegisterFrom(source, type), StackOperandFrom(destination)); 687 } else if (source.IsFpuRegister()) { 688 __ Str(FPRegisterFrom(source, type), StackOperandFrom(destination)); 689 } else if (source.IsConstant()) { 690 UseScratchRegisterScope temps(GetVIXLAssembler()); 691 HConstant* cst = source.GetConstant(); 692 CPURegister temp; 693 if (cst->IsIntConstant() || cst->IsLongConstant()) { 694 temp = cst->IsIntConstant() ? temps.AcquireW() : temps.AcquireX(); 695 } else { 696 DCHECK(cst->IsFloatConstant() || cst->IsDoubleConstant()); 697 temp = cst->IsFloatConstant() ? temps.AcquireS() : temps.AcquireD(); 698 } 699 MoveConstant(temp, cst); 700 __ Str(temp, StackOperandFrom(destination)); 701 } else { 702 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot()); 703 UseScratchRegisterScope temps(GetVIXLAssembler()); 704 Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW(); 705 __ Ldr(temp, StackOperandFrom(source)); 706 __ Str(temp, StackOperandFrom(destination)); 707 } 708 } 709} 710 711void CodeGeneratorARM64::Load(Primitive::Type type, 712 vixl::CPURegister dst, 713 const vixl::MemOperand& src) { 714 switch (type) { 715 case Primitive::kPrimBoolean: 716 __ Ldrb(Register(dst), src); 717 break; 718 case Primitive::kPrimByte: 719 __ Ldrsb(Register(dst), src); 720 break; 721 case Primitive::kPrimShort: 722 __ Ldrsh(Register(dst), src); 723 break; 724 case Primitive::kPrimChar: 725 __ Ldrh(Register(dst), src); 726 break; 727 case Primitive::kPrimInt: 728 case Primitive::kPrimNot: 729 case Primitive::kPrimLong: 730 case Primitive::kPrimFloat: 731 case Primitive::kPrimDouble: 732 DCHECK(dst.Is64Bits() == Is64BitType(type)); 733 __ Ldr(dst, src); 734 break; 735 case Primitive::kPrimVoid: 736 LOG(FATAL) << "Unreachable type " << type; 737 } 738} 739 740void CodeGeneratorARM64::Store(Primitive::Type type, 741 vixl::CPURegister rt, 
                               const vixl::MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      // Byte-sized types use a byte store.
      __ Strb(Register(rt), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      // Half-word-sized types use a half-word store.
      __ Strh(Register(rt), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Full-width store; the register width must match the type width.
      DCHECK(rt.Is64Bits() == Is64BitType(type));
      __ Str(rt, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

// Loads the ArtMethod* being compiled from sp[kCurrentMethodStackOffset],
// where GenerateFrameEntry stored it. It is a 32-bit heap reference, hence
// the W-register requirement.
void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

// Calls a quick runtime entry point through its offset in the Thread object
// and records PC info for `instruction` at `dex_pc`.
void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc) {
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc);
  // Implicit-check instructions may call the runtime from a leaf method; any
  // other runtime call must have marked the method non-leaf.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || !IsLeafMethod());
}

// Branches to `slow_path` unless the class in `class_reg` is initialized
// (status >= kStatusInitialized).
void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  __ Ldr(temp, HeapOperand(class_reg, mirror::Class::StatusOffset()));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
792 __ Dmb(InnerShareable, BarrierAll); 793 __ Bind(slow_path->GetExitLabel()); 794} 795 796InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph, 797 CodeGeneratorARM64* codegen) 798 : HGraphVisitor(graph), 799 assembler_(codegen->GetAssembler()), 800 codegen_(codegen) {} 801 802#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \ 803 M(ParallelMove) \ 804 M(Rem) \ 805 M(Shl) \ 806 M(Shr) \ 807 M(UShr) \ 808 809#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode 810 811enum UnimplementedInstructionBreakCode { 812 // Using a base helps identify when we hit such breakpoints. 813 UnimplementedInstructionBreakCodeBaseCode = 0x900, 814#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name), 815 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION) 816#undef ENUM_UNIMPLEMENTED_INSTRUCTION 817}; 818 819#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \ 820 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \ 821 UNUSED(instr); \ 822 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \ 823 } \ 824 void LocationsBuilderARM64::Visit##name(H##name* instr) { \ 825 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \ 826 locations->SetOut(Location::Any()); \ 827 } 828 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS) 829#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS 830 831#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE 832#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION 833 834void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) { 835 DCHECK_EQ(instr->InputCount(), 2U); 836 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); 837 Primitive::Type type = instr->GetResultType(); 838 switch (type) { 839 case Primitive::kPrimInt: 840 case Primitive::kPrimLong: 841 locations->SetInAt(0, Location::RequiresRegister()); 842 locations->SetInAt(1, 
Location::RegisterOrConstant(instr->InputAt(1))); 843 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 844 break; 845 846 case Primitive::kPrimFloat: 847 case Primitive::kPrimDouble: 848 locations->SetInAt(0, Location::RequiresFpuRegister()); 849 locations->SetInAt(1, Location::RequiresFpuRegister()); 850 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 851 break; 852 853 default: 854 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type; 855 } 856} 857 858void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) { 859 Primitive::Type type = instr->GetType(); 860 861 switch (type) { 862 case Primitive::kPrimInt: 863 case Primitive::kPrimLong: { 864 Register dst = OutputRegister(instr); 865 Register lhs = InputRegisterAt(instr, 0); 866 Operand rhs = InputOperandAt(instr, 1); 867 if (instr->IsAdd()) { 868 __ Add(dst, lhs, rhs); 869 } else if (instr->IsAnd()) { 870 __ And(dst, lhs, rhs); 871 } else if (instr->IsOr()) { 872 __ Orr(dst, lhs, rhs); 873 } else if (instr->IsSub()) { 874 __ Sub(dst, lhs, rhs); 875 } else { 876 DCHECK(instr->IsXor()); 877 __ Eor(dst, lhs, rhs); 878 } 879 break; 880 } 881 case Primitive::kPrimFloat: 882 case Primitive::kPrimDouble: { 883 FPRegister dst = OutputFPRegister(instr); 884 FPRegister lhs = InputFPRegisterAt(instr, 0); 885 FPRegister rhs = InputFPRegisterAt(instr, 1); 886 if (instr->IsAdd()) { 887 __ Fadd(dst, lhs, rhs); 888 } else if (instr->IsSub()) { 889 __ Fsub(dst, lhs, rhs); 890 } else { 891 LOG(FATAL) << "Unexpected floating-point binary operation"; 892 } 893 break; 894 } 895 default: 896 LOG(FATAL) << "Unexpected binary operation type " << type; 897 } 898} 899 900void LocationsBuilderARM64::VisitAdd(HAdd* instruction) { 901 HandleBinaryOp(instruction); 902} 903 904void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) { 905 HandleBinaryOp(instruction); 906} 907 908void LocationsBuilderARM64::VisitAnd(HAnd* instruction) { 909 
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // The index may be folded into the addressing mode when it is a constant.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister());
}

// Loads array[index] into the output register. For a constant index the
// element offset is folded into a single MemOperand; otherwise a scratch
// register holds the scaled base address.
void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  Location index = locations->InAt(1);
  // Offset of element 0 from the array object header.
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
  MemOperand source(obj);
  UseScratchRegisterScope temps(GetVIXLAssembler());

  if (index.IsConstant()) {
    // Fold (index << size_shift) into the static offset.
    offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
    source = MemOperand(obj, offset);
  } else {
    // temp = obj + (index << size_shift); then load from temp + offset.
    Register temp = temps.AcquireSameSizeAs(obj);
    Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
    __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
    source = MemOperand(temp, offset);
  }

  codegen_->Load(type, OutputCPURegister(instruction), source);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// The array length is a plain 32-bit field load from the array header.
void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  bool is_object = value_type == Primitive::kPrimNot;
  // Object stores go through the runtime (pAputObject) for the type check
  // and write barrier, so they use the runtime calling convention.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (is_object) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

// Stores a value into array[index]; mirrors the addressing logic of
// VisitArrayGet for the non-object case.
void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  if (value_type == Primitive::kPrimNot) {
    codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());

  } else {
    LocationSummary* locations = instruction->GetLocations();
    Register obj = InputRegisterAt(instruction, 0);
    CPURegister value = InputCPURegisterAt(instruction, 2);
    Location index = locations->InAt(1);
    size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
    MemOperand destination(obj);
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = MemOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      Register index_reg = InputRegisterAt(instruction, 1);
      __ Add(temp, obj, Operand(index_reg, LSL,
Primitive::ComponentSizeShift(value_type))); 995 destination = MemOperand(temp, offset); 996 } 997 998 codegen_->Store(value_type, value, destination); 999 } 1000} 1001 1002void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) { 1003 LocationSummary* locations = 1004 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1005 locations->SetInAt(0, Location::RequiresRegister()); 1006 locations->SetInAt(1, Location::RequiresRegister()); 1007 if (instruction->HasUses()) { 1008 locations->SetOut(Location::SameAsFirstInput()); 1009 } 1010} 1011 1012void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) { 1013 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(); 1014 codegen_->AddSlowPath(slow_path); 1015 1016 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1)); 1017 __ B(slow_path->GetEntryLabel(), hs); 1018} 1019 1020void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) { 1021 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary( 1022 instruction, LocationSummary::kCallOnSlowPath); 1023 locations->SetInAt(0, Location::RequiresRegister()); 1024 locations->SetInAt(1, Location::RequiresRegister()); 1025} 1026 1027void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) { 1028 UseScratchRegisterScope temps(GetVIXLAssembler()); 1029 Register obj = InputRegisterAt(instruction, 0);; 1030 Register cls = InputRegisterAt(instruction, 1);; 1031 Register temp = temps.AcquireW(); 1032 1033 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(); 1034 codegen_->AddSlowPath(slow_path); 1035 1036 // TODO: avoid this check if we know obj is not null. 1037 __ Cbz(obj, slow_path->GetExitLabel()); 1038 // Compare the class of `obj` with `cls`. 
1039 __ Ldr(temp, HeapOperand(obj, mirror::Object::ClassOffset())); 1040 __ Cmp(temp, cls); 1041 __ B(ne, slow_path->GetEntryLabel()); 1042 __ Bind(slow_path->GetExitLabel()); 1043} 1044 1045void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) { 1046 LocationSummary* locations = 1047 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath); 1048 locations->SetInAt(0, Location::RequiresRegister()); 1049 if (check->HasUses()) { 1050 locations->SetOut(Location::SameAsFirstInput()); 1051 } 1052} 1053 1054void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) { 1055 // We assume the class is not null. 1056 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64( 1057 check->GetLoadClass(), check, check->GetDexPc(), true); 1058 codegen_->AddSlowPath(slow_path); 1059 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0)); 1060} 1061 1062void LocationsBuilderARM64::VisitCompare(HCompare* instruction) { 1063 LocationSummary* locations = 1064 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1065 locations->SetInAt(0, Location::RequiresRegister()); 1066 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1067 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1068} 1069 1070void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) { 1071 Primitive::Type in_type = instruction->InputAt(0)->GetType(); 1072 1073 DCHECK_EQ(in_type, Primitive::kPrimLong); 1074 switch (in_type) { 1075 case Primitive::kPrimLong: { 1076 vixl::Label done; 1077 Register result = OutputRegister(instruction); 1078 Register left = InputRegisterAt(instruction, 0); 1079 Operand right = InputOperandAt(instruction, 1); 1080 __ Subs(result.X(), left, right); 1081 __ B(eq, &done); 1082 __ Mov(result, 1); 1083 __ Cneg(result, result, le); 1084 __ Bind(&done); 1085 break; 1086 } 1087 default: 1088 LOG(FATAL) << 
"Unimplemented compare type " << in_type;
  }
}

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  // Only materialized conditions produce a value; branch-fused ones are
  // emitted directly by VisitIf.
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

// Materializes a boolean condition into a register: res = (lhs cond rhs).
// Csel picks 0 (zero register) under the inverted condition, 1 otherwise.
void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;  // The consumer (HIf) emits the compare-and-branch itself.
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

  __ Cmp(lhs, rhs);
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}

// Generate the trivial visitor pairs for every comparison instruction; they
// all delegate to VisitCondition above.
#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                          \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
1137 locations->SetInAt(1, Location::RequiresRegister()); 1138 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1139 break; 1140 1141 case Primitive::kPrimFloat: 1142 case Primitive::kPrimDouble: 1143 locations->SetInAt(0, Location::RequiresFpuRegister()); 1144 locations->SetInAt(1, Location::RequiresFpuRegister()); 1145 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1146 break; 1147 1148 default: 1149 LOG(FATAL) << "Unexpected div type " << div->GetResultType(); 1150 } 1151} 1152 1153void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) { 1154 Primitive::Type type = div->GetResultType(); 1155 switch (type) { 1156 case Primitive::kPrimInt: 1157 case Primitive::kPrimLong: 1158 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1)); 1159 break; 1160 1161 case Primitive::kPrimFloat: 1162 case Primitive::kPrimDouble: 1163 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1)); 1164 break; 1165 1166 default: 1167 LOG(FATAL) << "Unexpected div type " << type; 1168 } 1169} 1170 1171void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) { 1172 LocationSummary* locations = 1173 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1174 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0))); 1175 if (instruction->HasUses()) { 1176 locations->SetOut(Location::SameAsFirstInput()); 1177 } 1178} 1179 1180void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) { 1181 SlowPathCodeARM64* slow_path = 1182 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction); 1183 codegen_->AddSlowPath(slow_path); 1184 Location value = instruction->GetLocations()->InAt(0); 1185 1186 if (value.IsConstant()) { 1187 int64_t divisor = Int64ConstantFrom(value); 1188 if (divisor == 0) { 1189 __ B(slow_path->GetEntryLabel()); 1190 } else { 1191 LOG(FATAL) << "Divisions by non-null 
constants should have been optimized away."; 1192 } 1193 } else { 1194 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel()); 1195 } 1196} 1197 1198void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) { 1199 LocationSummary* locations = 1200 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 1201 locations->SetOut(Location::ConstantLocation(constant)); 1202} 1203 1204void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) { 1205 UNUSED(constant); 1206 // Will be generated at use site. 1207} 1208 1209void LocationsBuilderARM64::VisitExit(HExit* exit) { 1210 exit->SetLocations(nullptr); 1211} 1212 1213void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) { 1214 UNUSED(exit); 1215 if (kIsDebugBuild) { 1216 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable"); 1217 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations. 1218 } 1219} 1220 1221void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) { 1222 LocationSummary* locations = 1223 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 1224 locations->SetOut(Location::ConstantLocation(constant)); 1225} 1226 1227void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) { 1228 UNUSED(constant); 1229 // Will be generated at use site. 1230} 1231 1232void LocationsBuilderARM64::VisitGoto(HGoto* got) { 1233 got->SetLocations(nullptr); 1234} 1235 1236void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) { 1237 HBasicBlock* successor = got->GetSuccessor(); 1238 // TODO: Support for suspend checks emission. 
  // Elide the branch when the successor is the fall-through block.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  // Only a materialized condition flows in as a register value.
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

// Emits the conditional branch for an HIf. A materialized condition is just
// tested against zero; an unmaterialized one is fused into a compare-and-
// branch (or Cbz/Cbnz when comparing against immediate zero).
void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);

  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition arm64_cond = ARM64Condition(condition->GetCondition());
    // eq/ne against immediate 0 maps to a single Cbz/Cbnz instruction.
    if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (arm64_cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(arm64_cond, true_target);
    }
  }

  // The false branch is a plain jump, skipped when it falls through.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  MemOperand field = MemOperand(InputRegisterAt(instruction, 0),
                                instruction->GetFieldOffset().Uint32Value());
  codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// Stores the value and, for reference fields, marks the GC card so the
// write is visible to the generational collector.
void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Primitive::Type field_type = instruction->GetFieldType();
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Register obj = InputRegisterAt(instruction, 0);
  codegen_->Store(field_type, value, MemOperand(obj, instruction->GetFieldOffset().Uint32Value()));
  if (field_type == Primitive::kPrimNot) {
codegen_->MarkGCCard(obj, Register(value)); 1316 } 1317} 1318 1319void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) { 1320 LocationSummary::CallKind call_kind = 1321 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath; 1322 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind); 1323 locations->SetInAt(0, Location::RequiresRegister()); 1324 locations->SetInAt(1, Location::RequiresRegister()); 1325 locations->SetOut(Location::RequiresRegister(), true); // The output does overlap inputs. 1326} 1327 1328void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) { 1329 LocationSummary* locations = instruction->GetLocations(); 1330 Register obj = InputRegisterAt(instruction, 0);; 1331 Register cls = InputRegisterAt(instruction, 1);; 1332 Register out = OutputRegister(instruction); 1333 1334 vixl::Label done; 1335 1336 // Return 0 if `obj` is null. 1337 // TODO: Avoid this check if we know `obj` is not null. 1338 __ Mov(out, 0); 1339 __ Cbz(obj, &done); 1340 1341 // Compare the class of `obj` with `cls`. 1342 __ Ldr(out, MemOperand(obj, mirror::Object::ClassOffset().Int32Value())); 1343 __ Cmp(out, cls); 1344 if (instruction->IsClassFinal()) { 1345 // Classes must be equal for the instanceof to succeed. 1346 __ Cset(out, eq); 1347 } else { 1348 // If the classes are not equal, we go into a slow path. 
1349 DCHECK(locations->OnlyCallsOnSlowPath()); 1350 SlowPathCodeARM64* slow_path = 1351 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(); 1352 codegen_->AddSlowPath(slow_path); 1353 __ B(ne, slow_path->GetEntryLabel()); 1354 __ Mov(out, 1); 1355 __ Bind(slow_path->GetExitLabel()); 1356 } 1357 1358 __ Bind(&done); 1359} 1360 1361void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) { 1362 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant); 1363 locations->SetOut(Location::ConstantLocation(constant)); 1364} 1365 1366void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) { 1367 // Will be generated at use site. 1368 UNUSED(constant); 1369} 1370 1371void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) { 1372 LocationSummary* locations = 1373 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall); 1374 locations->AddTemp(LocationFrom(x0)); 1375 1376 InvokeDexCallingConventionVisitor calling_convention_visitor; 1377 for (size_t i = 0; i < invoke->InputCount(); i++) { 1378 HInstruction* input = invoke->InputAt(i); 1379 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType())); 1380 } 1381 1382 Primitive::Type return_type = invoke->GetType(); 1383 if (return_type != Primitive::kPrimVoid) { 1384 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type)); 1385 } 1386} 1387 1388void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) { 1389 HandleInvoke(invoke); 1390} 1391 1392void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) { 1393 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError. 
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Offset of the IMT slot for this interface method inside the class.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
      (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  Location receiver = invoke->GetLocations()->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp, StackOperandFrom(receiver));
    __ Ldr(temp, HeapOperand(temp, class_offset));
  } else {
    __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
  }
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, HeapOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, HeapOperand(temp, entry_point));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

// Resolves the callee through the caller's dex cache and performs an
// indirect call through its quick entry point.
void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0 as per the calling convention
  DCHECK(temp.Is(w0));
  // Byte offset of the resolved method inside dex_cache_resolved_methods_.
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp, MemOperand(temp.X(),
                          mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr, MemOperand(temp.X(),
                        mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kArm64WordSize).SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

// Virtual dispatch: load the receiver's class, index into its embedded
// vtable, then call through the method's quick entry point.
void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  // A slow path is only needed when the class may require resolution or
  // initialization at runtime.
  LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
                                                              : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a java.lang.Class reference: either the current method's declaring
// class, or a type resolved through the dex cache (with a slow path for
// unresolved/uninitialized classes).
void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
  Register out = OutputRegister(cls);
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
  } else {
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
    __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));

    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: take the slow path.
    __ Cbz(out, slow_path->GetEntryLabel());
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Reads the pending exception from the thread and clears the slot.
void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
  __ Ldr(OutputRegister(instruction), exception);
  __ Str(wzr, exception);
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a resolved string from the dex cache; a null entry takes the
// resolution slow path.
void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
  codegen_->AddSlowPath(slow_path);

  Register out = OutputRegister(load);
  codegen_->LoadCurrentMethod(out);
  __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheStringsOffset()));
  __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  __ Cbz(out, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

// monitor-enter / monitor-exit both go through the runtime.
void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      // Integer negation can take an immediate operand.
      locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  // Runtime entry point expects type index in w0 and the current method in w1.
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

// Object allocation is delegated to the runtime, mirroring VisitNewArray.
void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  codegen_->LoadCurrentMethod(current_method);
  __ Mov(type_index, instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Boolean not is XOR with 1; integer not is a bitwise MVN.
void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Branches to the NullPointerException slow path when the reference is null.
// A constant-null input branches unconditionally.
void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location =
parameter_visitor_.GetNextLocation(instruction->GetType()); 1773 if (location.IsStackSlot()) { 1774 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1775 } else if (location.IsDoubleStackSlot()) { 1776 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1777 } 1778 locations->SetOut(location); 1779} 1780 1781void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) { 1782 // Nothing to do, the parameter is already at its location. 1783 UNUSED(instruction); 1784} 1785 1786void LocationsBuilderARM64::VisitPhi(HPhi* instruction) { 1787 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); 1788 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) { 1789 locations->SetInAt(i, Location::Any()); 1790 } 1791 locations->SetOut(Location::Any()); 1792} 1793 1794void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) { 1795 UNUSED(instruction); 1796 LOG(FATAL) << "Unreachable"; 1797} 1798 1799void LocationsBuilderARM64::VisitReturn(HReturn* instruction) { 1800 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction); 1801 Primitive::Type return_type = instruction->InputAt(0)->GetType(); 1802 locations->SetInAt(0, ARM64ReturnLocation(return_type)); 1803} 1804 1805void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) { 1806 UNUSED(instruction); 1807 codegen_->GenerateFrameExit(); 1808 __ Br(lr); 1809} 1810 1811void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) { 1812 instruction->SetLocations(nullptr); 1813} 1814 1815void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) { 1816 UNUSED(instruction); 1817 codegen_->GenerateFrameExit(); 1818 __ Br(lr); 1819} 1820 1821void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) { 1822 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store); 1823 Primitive::Type 
field_type = store->InputAt(1)->GetType(); 1824 switch (field_type) { 1825 case Primitive::kPrimNot: 1826 case Primitive::kPrimBoolean: 1827 case Primitive::kPrimByte: 1828 case Primitive::kPrimChar: 1829 case Primitive::kPrimShort: 1830 case Primitive::kPrimInt: 1831 case Primitive::kPrimFloat: 1832 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal()))); 1833 break; 1834 1835 case Primitive::kPrimLong: 1836 case Primitive::kPrimDouble: 1837 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal()))); 1838 break; 1839 1840 default: 1841 LOG(FATAL) << "Unimplemented local type " << field_type; 1842 } 1843} 1844 1845void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) { 1846 UNUSED(store); 1847} 1848 1849void LocationsBuilderARM64::VisitSub(HSub* instruction) { 1850 HandleBinaryOp(instruction); 1851} 1852 1853void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) { 1854 HandleBinaryOp(instruction); 1855} 1856 1857void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) { 1858 LocationSummary* locations = 1859 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1860 locations->SetInAt(0, Location::RequiresRegister()); 1861 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1862} 1863 1864void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) { 1865 Register cls = InputRegisterAt(instruction, 0); 1866 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1867 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), MemOperand(cls, offset)); 1868} 1869 1870void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) { 1871 LocationSummary* locations = 1872 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1873 locations->SetInAt(0, Location::RequiresRegister()); 1874 locations->SetInAt(1, 
Location::RequiresRegister()); 1875} 1876 1877void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) { 1878 CPURegister value = InputCPURegisterAt(instruction, 1); 1879 Register cls = InputRegisterAt(instruction, 0); 1880 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1881 Primitive::Type field_type = instruction->GetFieldType(); 1882 1883 codegen_->Store(field_type, value, MemOperand(cls, offset)); 1884 if (field_type == Primitive::kPrimNot) { 1885 codegen_->MarkGCCard(cls, Register(value)); 1886 } 1887} 1888 1889void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) { 1890 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath); 1891} 1892 1893void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) { 1894 // TODO: Improve support for suspend checks. 1895 SuspendCheckSlowPathARM64* slow_path = 1896 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr); 1897 codegen_->AddSlowPath(slow_path); 1898 1899 __ Subs(wSuspend, wSuspend, 1); 1900 __ B(slow_path->GetEntryLabel(), le); 1901 __ Bind(slow_path->GetReturnLabel()); 1902} 1903 1904void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) { 1905 temp->SetLocations(nullptr); 1906} 1907 1908void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) { 1909 // Nothing to do, this is driven by the code generator. 
1910 UNUSED(temp); 1911} 1912 1913void LocationsBuilderARM64::VisitThrow(HThrow* instruction) { 1914 LocationSummary* locations = 1915 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1916 InvokeRuntimeCallingConvention calling_convention; 1917 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0))); 1918} 1919 1920void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) { 1921 codegen_->InvokeRuntime( 1922 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc()); 1923} 1924 1925void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) { 1926 LocationSummary* locations = 1927 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall); 1928 Primitive::Type input_type = conversion->GetInputType(); 1929 Primitive::Type result_type = conversion->GetResultType(); 1930 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) || 1931 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) { 1932 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type; 1933 } 1934 1935 if (IsFPType(input_type)) { 1936 locations->SetInAt(0, Location::RequiresFpuRegister()); 1937 } else { 1938 locations->SetInAt(0, Location::RequiresRegister()); 1939 } 1940 1941 if (IsFPType(result_type)) { 1942 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1943 } else { 1944 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1945 } 1946} 1947 1948void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) { 1949 Primitive::Type result_type = conversion->GetResultType(); 1950 Primitive::Type input_type = conversion->GetInputType(); 1951 1952 DCHECK_NE(input_type, result_type); 1953 1954 if (IsIntegralType(result_type) && IsIntegralType(input_type)) { 1955 int result_size = Primitive::ComponentSize(result_type); 1956 int 
input_size = Primitive::ComponentSize(input_type); 1957 int min_size = kBitsPerByte * std::min(result_size, input_size); 1958 if ((result_type == Primitive::kPrimChar) || 1959 ((input_type == Primitive::kPrimChar) && (result_size > input_size))) { 1960 __ Ubfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, min_size); 1961 } else { 1962 __ Sbfx(OutputRegister(conversion), InputRegisterAt(conversion, 0), 0, min_size); 1963 } 1964 return; 1965 } 1966 1967 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type 1968 << " to " << result_type; 1969} 1970 1971void LocationsBuilderARM64::VisitXor(HXor* instruction) { 1972 HandleBinaryOp(instruction); 1973} 1974 1975void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) { 1976 HandleBinaryOp(instruction); 1977} 1978 1979#undef __ 1980#undef QUICK_ENTRY_POINT 1981 1982} // namespace arm64 1983} // namespace art 1984