// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
#define V8_ARM64_ASSEMBLER_ARM64_INL_H_

#include "src/arm64/assembler-arm64.h"
#include "src/assembler.h"
#include "src/debug/debug.h"


namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return true; }


void RelocInfo::apply(intptr_t delta) {
  // On arm64 only internal references need extra work.
  DCHECK(RelocInfo::IsInternalReference(rmode_));

  // Absolute code pointer inside code object moves with the code object.
  intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
  *p += delta;  // Relocate entry.
}


void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(isolate_, pc_, host_, target,
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}

inline int CPURegister::code() const {
  DCHECK(IsValid());
  return reg_code;
}


inline CPURegister::RegisterType CPURegister::type() const {
  DCHECK(IsValidOrNone());
  return reg_type;
}


inline RegList CPURegister::Bit() const {
  DCHECK(static_cast<size_t>(reg_code) < (sizeof(RegList) * kBitsPerByte));
  return IsValid() ? 1UL << reg_code : 0;
}
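
// A small usage sketch (illustrative only): because Bit() is just
// 1UL << reg_code, register sets represented as RegList compose with
// ordinary bitwise operations, e.g.
//   RegList saved = x19.Bit() | x20.Bit();         // the set {x19, x20}
//   bool contains_x19 = (saved & x19.Bit()) != 0;  // true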

inline int CPURegister::SizeInBits() const {
  DCHECK(IsValid());
  return reg_size;
}


inline int CPURegister::SizeInBytes() const {
  DCHECK(IsValid());
  DCHECK(SizeInBits() % 8 == 0);
  return reg_size / 8;
}


inline bool CPURegister::Is32Bits() const {
  DCHECK(IsValid());
  return reg_size == 32;
}


inline bool CPURegister::Is64Bits() const {
  DCHECK(IsValid());
  return reg_size == 64;
}


inline bool CPURegister::IsValid() const {
  if (IsValidRegister() || IsValidFPRegister()) {
    DCHECK(!IsNone());
    return true;
  } else {
    DCHECK(IsNone());
    return false;
  }
}


inline bool CPURegister::IsValidRegister() const {
  return IsRegister() &&
         ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) &&
         ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode));
}


inline bool CPURegister::IsValidFPRegister() const {
  return IsFPRegister() &&
         ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) &&
         (reg_code < kNumberOfFPRegisters);
}


inline bool CPURegister::IsNone() const {
  // kNoRegister types should always have size 0 and code 0.
  DCHECK((reg_type != kNoRegister) || (reg_code == 0));
  DCHECK((reg_type != kNoRegister) || (reg_size == 0));

  return reg_type == kNoRegister;
}


inline bool CPURegister::Is(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return Aliases(other) && (reg_size == other.reg_size);
}


inline bool CPURegister::Aliases(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return (reg_code == other.reg_code) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsRegister() const {
  return reg_type == kRegister;
}


inline bool CPURegister::IsFPRegister() const {
  return reg_type == kFPRegister;
}


inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size == other.reg_size) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsValidOrNone() const {
  return IsValid() || IsNone();
}


inline bool CPURegister::IsZero() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kZeroRegCode);
}


inline bool CPURegister::IsSP() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kSPRegInternalCode);
}


inline void CPURegList::Combine(const CPURegList& other) {
  DCHECK(IsValid());
  DCHECK(other.type() == type_);
  DCHECK(other.RegisterSizeInBits() == size_);
  list_ |= other.list();
}


inline void CPURegList::Remove(const CPURegList& other) {
  DCHECK(IsValid());
  if (other.type() == type_) {
    list_ &= ~other.list();
  }
}


inline void CPURegList::Combine(const CPURegister& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.SizeInBits() == size_);
  Combine(other.code());
}


inline void CPURegList::Remove(const CPURegister& other1,
                               const CPURegister& other2,
                               const CPURegister& other3,
                               const CPURegister& other4) {
  if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
  if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
  if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
  if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
}


inline void CPURegList::Combine(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ |= (1UL << code);
}


inline void CPURegList::Remove(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ &= ~(1UL << code);
}
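
// CPURegList (above) is a plain bit set over register codes, so Combine and
// Remove are just OR and AND-NOT on list_. A rough sketch, assuming the
// register-list constructor declared in assembler-arm64.h:
//   CPURegList list(x0, x1, x2);  // list_ == 0b0111
//   list.Combine(x3);             // list_ == 0b1111
//   list.Remove(x1);              // list_ == 0b1101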

inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return csp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kXRegSizeInBits);
  }
}


inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wcsp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kWRegSizeInBits);
  }
}


inline FPRegister FPRegister::SRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kSRegSizeInBits);
}


inline FPRegister FPRegister::DRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kDRegSizeInBits);
}


inline Register CPURegister::W() const {
  DCHECK(IsValidRegister());
  return Register::WRegFromCode(reg_code);
}


inline Register CPURegister::X() const {
  DCHECK(IsValidRegister());
  return Register::XRegFromCode(reg_code);
}


inline FPRegister CPURegister::S() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::SRegFromCode(reg_code);
}


inline FPRegister CPURegister::D() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::DRegFromCode(reg_code);
}


// Immediate.
// Default initializer is for integral types.
template<typename T>
struct ImmediateInitializer {
  static const bool kIsIntType = true;
  static inline RelocInfo::Mode rmode_for(T) {
    return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32;
  }
  static inline int64_t immediate_for(T t) {
    STATIC_ASSERT(sizeof(T) <= 8);
    return t;
  }
};


template<>
struct ImmediateInitializer<Smi*> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(Smi* t) {
    return RelocInfo::NONE64;
  }
  static inline int64_t immediate_for(Smi* t) {
    return reinterpret_cast<int64_t>(t);
  }
};


template<>
struct ImmediateInitializer<ExternalReference> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(ExternalReference t) {
    return RelocInfo::EXTERNAL_REFERENCE;
  }
  static inline int64_t immediate_for(ExternalReference t) {
    return reinterpret_cast<int64_t>(t.address());
  }
};


template<typename T>
Immediate::Immediate(Handle<T> value) {
  InitializeHandle(value);
}


template<typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}


template<typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(rmode) {
  STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
}


// Operand.
template<typename T>
Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode),
      reg_(NoReg) {}


Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  DCHECK(!reg.IsSP());
}


Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(reg.IsValid());
  DCHECK(shift_amount <= 4);
  DCHECK(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}
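
// The two register constructors above correspond to the shifted-register
// and extended-register operand forms of ARM64 data-processing
// instructions. Informally:
//   Operand(x1, LSL, 4)   // x1 shifted left by 4 bits: x1 << 4
//   Operand(w2, SXTW)     // w2 sign-extended to 64 bits
//   Operand(w3, UXTW, 2)  // w3 zero-extended, then shifted left by 2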

bool Operand::IsImmediate() const {
  return reg_.Is(NoReg);
}


bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}


bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}


bool Operand::IsZero() const {
  if (IsImmediate()) {
    return ImmediateValue() == 0;
  } else {
    return reg().IsZero();
  }
}


Operand Operand::ToExtendedRegister() const {
  DCHECK(IsShiftedRegister());
  DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}


Immediate Operand::immediate() const {
  DCHECK(IsImmediate());
  return immediate_;
}


int64_t Operand::ImmediateValue() const {
  DCHECK(IsImmediate());
  return immediate_.value();
}


Register Operand::reg() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}


Shift Operand::shift() const {
  DCHECK(IsShiftedRegister());
  return shift_;
}


Extend Operand::extend() const {
  DCHECK(IsExtendedRegister());
  return extend_;
}


unsigned Operand::shift_amount() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return shift_amount_;
}


Operand Operand::UntagSmi(Register smi) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  return Operand(smi, ASR, kSmiShift);
}


Operand Operand::UntagSmiAndScale(Register smi, int scale) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
  if (scale > kSmiShift) {
    return Operand(smi, LSL, scale - kSmiShift);
  } else if (scale < kSmiShift) {
    return Operand(smi, ASR, kSmiShift - scale);
  }
  return Operand(smi);
}
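
// UntagSmiAndScale folds the untag (arithmetic shift right by kSmiShift)
// and the scale (shift left) into a single shift. A worked example,
// assuming the 64-bit smi layout with kSmiShift == 32 and
// kSmiValueSize == 32: a smi holds value << 32, so UntagSmiAndScale(smi, 3)
// yields ASR by 32 - 3 == 29, and (value << 32) >> 29 == value << 3,
// i.e. the untagged value times 8.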

MemOperand::MemOperand()
    : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
}


MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
  DCHECK(base.Is64Bits() && !base.IsZero());
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(!regoffset.IsSP());
  DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
  DCHECK(shift == LSL);
}


MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base), addrmode_(addrmode) {
  DCHECK(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.ImmediateValue();

    regoffset_ = NoReg;
  } else if (offset.IsShiftedRegister()) {
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    shift_ = offset.shift();
    shift_amount_ = offset.shift_amount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
    DCHECK(shift_ == LSL);
  } else {
    DCHECK(offset.IsExtendedRegister());
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    extend_ = offset.extend();
    shift_amount_ = offset.shift_amount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    DCHECK(!regoffset_.IsSP());
    DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}

bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}


bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}


bool MemOperand::IsPreIndex() const {
  return addrmode_ == PreIndex;
}


bool MemOperand::IsPostIndex() const {
  return addrmode_ == PostIndex;
}

Operand MemOperand::OffsetAsOperand() const {
  if (IsImmediateOffset()) {
    return offset();
  } else {
    DCHECK(IsRegisterOffset());
    if (extend() == NO_EXTEND) {
      return Operand(regoffset(), shift(), shift_amount());
    } else {
      return Operand(regoffset(), extend(), shift_amount());
    }
  }
}


void Assembler::Unreachable() {
#ifdef USE_SIMULATOR
  debug("UNREACHABLE", __LINE__, BREAK);
#else
  // Crash by branching to 0. lr now points near the fault.
  Emit(BLR | Rn(xzr));
#endif
}


Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}


// Read/Modify the code target address in the branch/call instruction at pc.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}
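
// Note on the scheme above: a patchable target address is never encoded in
// the instruction at pc itself. pc holds a pc-relative literal load
// (ldr xd, [pc, #imm]), which target_pointer_address_at() resolves to the
// 64-bit constant-pool slot it reads. Reading or patching the target is
// then a plain data access to that slot; the instruction stream is left
// untouched.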

Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target, given the return address that
  // will be returned to after the call.
  // The call sequence on ARM64 is:
  //  ldr ip0, #...       @ load from literal pool
  //  blr ip0
  Address candidate = pc - 2 * kInstructionSize;
  Instruction* instr = reinterpret_cast<Instruction*>(candidate);
  USE(instr);
  DCHECK(instr->IsLdrLiteralX());
  return candidate;
}


Address Assembler::return_address_from_call_start(Address pc) {
  // The call, generated by MacroAssembler::Call, is one of two possible
  // sequences:
  //
  // Without relocation:
  //  movz temp, #(target & 0x000000000000ffff)
  //  movk temp, #(target & 0x00000000ffff0000)
  //  movk temp, #(target & 0x0000ffff00000000)
  //  blr temp
  //
  // With relocation:
  //  ldr temp, =target
  //  blr temp
  //
  // The return address is immediately after the blr instruction in both cases,
  // so it can be found by adding the call size to the address at the start of
  // the call sequence.
  STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize);
  STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize);

  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsMovz()) {
    // Verify the instruction sequence.
    DCHECK(instr->following(1)->IsMovk());
    DCHECK(instr->following(2)->IsMovk());
    DCHECK(instr->following(3)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithoutRelocation;
  } else {
    // Verify the instruction sequence.
    DCHECK(instr->IsLdrLiteralX());
    DCHECK(instr->following(1)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithRelocation;
  }
}
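
// For reference, the sizes asserted above work out as follows, with
// kInstructionSize == 4 bytes on ARM64:
//   without relocation: movz + movk + movk + blr = 4 instructions = 16 bytes
//   with relocation:    ldr (literal) + blr      = 2 instructions =  8 bytes
// so the function simply adds 16 or 8 bytes to the start-of-call pc.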

void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code,
    Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::Address_at(target_pointer_address_at(pc)) = target;
  // Intuitively, we would think it is necessary to always flush the
  // instruction cache after patching a target address in the code as follows:
  //   Assembler::FlushICache(isolate(), pc, sizeof(target));
  // However, on ARM64 no instruction is patched here: embedded constants are
  // loaded with an instruction of the form
  //   ldr ip, [pc, #...]
  // and only the constant-pool entry is rewritten. Since the instruction that
  // accesses the constant pool remains unchanged, a flush is not required.
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
      || rmode_ == EMBEDDED_OBJECT
      || rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_pointer_address_at(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::target_pointer_address_at(pc_);
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  UNIMPLEMENTED();
  Cell* null_cell = NULL;
  return Handle<Cell>(null_cell);
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  UNIMPLEMENTED();
}


static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize;
static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize;
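
// In byte terms, with kInstructionSize == 4: the young code age sequence
// spans 5 * 4 == 20 bytes, and the stub entry point read and patched by
// code_age_stub() / set_code_age_stub() below sits at byte offset
// 3 * 4 == 12 from the start of the sequence.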

Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM64.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  // Read the stub entry point from the code age sequence.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(!Code::IsYoungSequence(stub->GetIsolate(), pc_));
  // Overwrite the stub entry point in the code age sequence. This is loaded as
  // a literal so there is no need to call FlushICache here.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  Memory::Address_at(stub_entry_address) = stub->instruction_start();
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // For the above sequences the RelocInfo points to the literal load that
  // loads the call address.
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d : LDR_s;
  }
}


LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
                                         const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDP_x : LDP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDP_d : LDP_s;
  }
}


LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STR_d : STR_s;
  }
}


LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STP_d : STP_s;
  }
}


LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}


int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  DCHECK(kStartOfLabelLinkChain == 0);
  int offset = LinkAndGetByteOffsetTo(label);
  DCHECK(IsAligned(offset, kInstructionSize));
  return offset >> kInstructionSizeLog2;
}


Instr Assembler::Flags(FlagsUpdate S) {
  if (S == SetFlags) {
    return 1 << FlagsUpdate_offset;
  } else if (S == LeaveFlags) {
    return 0 << FlagsUpdate_offset;
  }
  UNREACHABLE();
  return 0;
}


Instr Assembler::Cond(Condition cond) {
  return cond << Condition_offset;
}


Instr Assembler::ImmPCRelAddress(int imm21) {
  CHECK(is_int21(imm21));
  Instr imm = static_cast<Instr>(truncate_to_int21(imm21));
  Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
  Instr immlo = imm << ImmPCRelLo_offset;
  return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
}
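
// ImmPCRelAddress mirrors the ADR/ADRP split encoding: the 21-bit
// pc-relative immediate is stored as a low field (immlo, ImmPCRelLo_width
// bits) and a high field (immhi) in separate parts of the instruction.
// As a worked example, with the two-bit immlo of ADR: imm21 == 21 ==
// 0b10101 encodes immlo == 0b01 and immhi == 0b101.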

Instr Assembler::ImmUncondBranch(int imm26) {
  CHECK(is_int26(imm26));
  return truncate_to_int26(imm26) << ImmUncondBranch_offset;
}


Instr Assembler::ImmCondBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCondBranch_offset;
}


Instr Assembler::ImmCmpBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCmpBranch_offset;
}


Instr Assembler::ImmTestBranch(int imm14) {
  CHECK(is_int14(imm14));
  return truncate_to_int14(imm14) << ImmTestBranch_offset;
}


Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
  DCHECK(is_uint6(bit_pos));
  // Subtract five from the shift offset, so that bit 5 of bit_pos (rather
  // than bit 0) lands at ImmTestBranchBit5_offset.
  unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
  unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
  b5 &= ImmTestBranchBit5_mask;
  b40 &= ImmTestBranchBit40_mask;
  return b5 | b40;
}


Instr Assembler::SF(Register rd) {
  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
}


Instr Assembler::ImmAddSub(int imm) {
  DCHECK(IsImmAddSub(imm));
  if (is_uint12(imm)) {  // No shift required.
    imm <<= ImmAddSub_offset;
  } else {
    imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
  }
  return imm;
}


Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
  USE(reg_size);
  return imms << ImmS_offset;
}


Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  DCHECK(is_uint6(immr));
  return immr << ImmR_offset;
}


Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(is_uint6(imms));
  DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
  USE(reg_size);
  return imms << ImmSetBits_offset;
}


Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  return immr << ImmRotate_offset;
}


Instr Assembler::ImmLLiteral(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmLLiteral_offset;
}


Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
  USE(reg_size);
  return bitn << BitN_offset;
}


Instr Assembler::ShiftDP(Shift shift) {
  DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
  return shift << ShiftDP_offset;
}


Instr Assembler::ImmDPShift(unsigned amount) {
  DCHECK(is_uint6(amount));
  return amount << ImmDPShift_offset;
}


Instr Assembler::ExtendMode(Extend extend) {
  return extend << ExtendMode_offset;
}


Instr Assembler::ImmExtendShift(unsigned left_shift) {
  DCHECK(left_shift <= 4);
  return left_shift << ImmExtendShift_offset;
}


Instr Assembler::ImmCondCmp(unsigned imm) {
  DCHECK(is_uint5(imm));
  return imm << ImmCondCmp_offset;
}


Instr Assembler::Nzcv(StatusFlags nzcv) {
  return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
}


Instr Assembler::ImmLSUnsigned(int imm12) {
  DCHECK(is_uint12(imm12));
  return imm12 << ImmLSUnsigned_offset;
}


Instr Assembler::ImmLS(int imm9) {
  DCHECK(is_int9(imm9));
  return truncate_to_int9(imm9) << ImmLS_offset;
}


Instr Assembler::ImmLSPair(int imm7, LSDataSize size) {
  DCHECK(((imm7 >> size) << size) == imm7);
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
}
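
// ImmLSPair stores the pair offset scaled by the access size: LSDataSize is
// log2 of the per-register access size in bytes. For an x-register pair
// (size == 3, i.e. 8-byte accesses), a byte offset of 16 encodes as
// 16 >> 3 == 2, while any offset that is not a multiple of 8 fails the
// first DCHECK above.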

Instr Assembler::ImmShiftLS(unsigned shift_amount) {
  DCHECK(is_uint1(shift_amount));
  return shift_amount << ImmShiftLS_offset;
}


Instr Assembler::ImmException(int imm16) {
  DCHECK(is_uint16(imm16));
  return imm16 << ImmException_offset;
}


Instr Assembler::ImmSystemRegister(int imm15) {
  DCHECK(is_uint15(imm15));
  return imm15 << ImmSystemRegister_offset;
}


Instr Assembler::ImmHint(int imm7) {
  DCHECK(is_uint7(imm7));
  return imm7 << ImmHint_offset;
}


Instr Assembler::ImmBarrierDomain(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierDomain_offset;
}


Instr Assembler::ImmBarrierType(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierType_offset;
}


LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) {
  DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8));
  return static_cast<LSDataSize>(op >> SizeLS_offset);
}


Instr Assembler::ImmMoveWide(int imm) {
  DCHECK(is_uint16(imm));
  return imm << ImmMoveWide_offset;
}


Instr Assembler::ShiftMoveWide(int shift) {
  DCHECK(is_uint2(shift));
  return shift << ShiftMoveWide_offset;
}


Instr Assembler::FPType(FPRegister fd) {
  return fd.Is64Bits() ? FP64 : FP32;
}


Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));
  return scale << FPScale_offset;
}


const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
  return reg.Is64Bits() ? xzr : wzr;
}


inline void Assembler::CheckBufferSpace() {
  DCHECK(pc_ < (buffer_ + buffer_size_));
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}


inline void Assembler::CheckBuffer() {
  CheckBufferSpace();
  if (pc_offset() >= next_veneer_pool_check_) {
    CheckVeneerPool(false, true);
  }
  if (pc_offset() >= next_constant_pool_check_) {
    CheckConstPool(false, true);
  }
}


TypeFeedbackId Assembler::RecordedAstId() {
  DCHECK(!recorded_ast_id_.IsNone());
  return recorded_ast_id_;
}


void Assembler::ClearRecordedAstId() {
  recorded_ast_id_ = TypeFeedbackId::None();
}


}  // namespace internal
}  // namespace v8

#endif  // V8_ARM64_ASSEMBLER_ARM64_INL_H_