// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
#define V8_ARM64_ASSEMBLER_ARM64_INL_H_

#include "src/arm64/assembler-arm64.h"
#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {


bool CpuFeatures::SupportsCrankshaft() { return true; }

bool CpuFeatures::SupportsSimd128() { return false; }

void RelocInfo::apply(intptr_t delta) {
  // On arm64 only internal references need extra work.
  DCHECK(RelocInfo::IsInternalReference(rmode_));

  // Absolute code pointer inside code object moves with the code object.
  intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
  *p += delta;  // Relocate entry.
}

inline int CPURegister::code() const {
  DCHECK(IsValid());
  return reg_code;
}


inline CPURegister::RegisterType CPURegister::type() const {
  DCHECK(IsValidOrNone());
  return reg_type;
}


inline RegList CPURegister::Bit() const {
  DCHECK(static_cast<size_t>(reg_code) < (sizeof(RegList) * kBitsPerByte));
  return IsValid() ? 1UL << reg_code : 0;
}


inline int CPURegister::SizeInBits() const {
  DCHECK(IsValid());
  return reg_size;
}


inline int CPURegister::SizeInBytes() const {
  DCHECK(IsValid());
  DCHECK(SizeInBits() % 8 == 0);
  return reg_size / 8;
}


inline bool CPURegister::Is32Bits() const {
  DCHECK(IsValid());
  return reg_size == 32;
}


inline bool CPURegister::Is64Bits() const {
  DCHECK(IsValid());
  return reg_size == 64;
}


inline bool CPURegister::IsValid() const {
  if (IsValidRegister() || IsValidFPRegister()) {
    DCHECK(!IsNone());
    return true;
  } else {
    DCHECK(IsNone());
    return false;
  }
}


inline bool CPURegister::IsValidRegister() const {
  return IsRegister() &&
         ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) &&
         ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode));
}


inline bool CPURegister::IsValidFPRegister() const {
  return IsFPRegister() &&
         ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) &&
         (reg_code < kNumberOfFPRegisters);
}


inline bool CPURegister::IsNone() const {
  // kNoRegister types should always have size 0 and code 0.
  DCHECK((reg_type != kNoRegister) || (reg_code == 0));
  DCHECK((reg_type != kNoRegister) || (reg_size == 0));

  return reg_type == kNoRegister;
}


inline bool CPURegister::Is(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return Aliases(other) && (reg_size == other.reg_size);
}


inline bool CPURegister::Aliases(const CPURegister& other) const {
  DCHECK(IsValidOrNone() && other.IsValidOrNone());
  return (reg_code == other.reg_code) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsRegister() const {
  return reg_type == kRegister;
}


inline bool CPURegister::IsFPRegister() const {
  return reg_type == kFPRegister;
}


inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size == other.reg_size) && (reg_type == other.reg_type);
}


inline bool CPURegister::IsValidOrNone() const {
  return IsValid() || IsNone();
}


inline bool CPURegister::IsZero() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kZeroRegCode);
}


inline bool CPURegister::IsSP() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code == kSPRegInternalCode);
}


inline void CPURegList::Combine(const CPURegList& other) {
  DCHECK(IsValid());
  DCHECK(other.type() == type_);
  DCHECK(other.RegisterSizeInBits() == size_);
  list_ |= other.list();
}


inline void CPURegList::Remove(const CPURegList& other) {
  DCHECK(IsValid());
  if (other.type() == type_) {
    list_ &= ~other.list();
  }
}


inline void CPURegList::Combine(const CPURegister& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.SizeInBits() == size_);
  Combine(other.code());
}


inline void CPURegList::Remove(const CPURegister& other1,
                               const CPURegister& other2,
                               const CPURegister& other3,
                               const CPURegister& other4) {
  if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
  if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
  if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
  if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
}


inline void CPURegList::Combine(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ |= (1UL << code);
}


inline void CPURegList::Remove(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ &= ~(1UL << code);
}


inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return csp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kXRegSizeInBits);
  }
}


inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wcsp;
  } else {
    DCHECK(code < kNumberOfRegisters);
    return Register::Create(code, kWRegSizeInBits);
  }
}


inline FPRegister FPRegister::SRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kSRegSizeInBits);
}


inline FPRegister FPRegister::DRegFromCode(unsigned code) {
  DCHECK(code < kNumberOfFPRegisters);
  return FPRegister::Create(code, kDRegSizeInBits);
}


inline Register CPURegister::W() const {
  DCHECK(IsValidRegister());
  return Register::WRegFromCode(reg_code);
}


inline Register CPURegister::X() const {
  DCHECK(IsValidRegister());
  return Register::XRegFromCode(reg_code);
}


inline FPRegister CPURegister::S() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::SRegFromCode(reg_code);
}


inline FPRegister CPURegister::D() const {
  DCHECK(IsValidFPRegister());
  return FPRegister::DRegFromCode(reg_code);
}


// Immediate.
// Default initializer is for int types
template<typename T>
struct ImmediateInitializer {
  static const bool kIsIntType = true;
  static inline RelocInfo::Mode rmode_for(T) {
    return sizeof(T) == 8 ? RelocInfo::NONE64 : RelocInfo::NONE32;
  }
  static inline int64_t immediate_for(T t) {
    STATIC_ASSERT(sizeof(T) <= 8);
    return t;
  }
};


template<>
struct ImmediateInitializer<Smi*> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(Smi* t) {
    return RelocInfo::NONE64;
  }
  static inline int64_t immediate_for(Smi* t) {
    return reinterpret_cast<int64_t>(t);
  }
};


template<>
struct ImmediateInitializer<ExternalReference> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(ExternalReference t) {
    return RelocInfo::EXTERNAL_REFERENCE;
  }
  static inline int64_t immediate_for(ExternalReference t) {
    return reinterpret_cast<int64_t>(t.address());
  }
};


template<typename T>
Immediate::Immediate(Handle<T> value) {
  InitializeHandle(value);
}

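// For non-handle values, the relocation mode is inferred from the
// ImmediateInitializer traits above: for example, a 64-bit integral
// initializer records RelocInfo::NONE64, and an ExternalReference records
// RelocInfo::EXTERNAL_REFERENCE.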
template<typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}


template<typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(rmode) {
  STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
}


// Operand.
template<typename T>
Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode),
      reg_(NoReg) {}


Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  DCHECK(!reg.IsSP());
}


Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(reg.IsValid());
  DCHECK(shift_amount <= 4);
  DCHECK(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}


bool Operand::IsImmediate() const {
  return reg_.Is(NoReg);
}


bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}


bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}


bool Operand::IsZero() const {
  if (IsImmediate()) {
    return ImmediateValue() == 0;
  } else {
    return reg().IsZero();
  }
}

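// Re-encodes a plain or LSL-shifted register as an extended-register operand,
// e.g. Operand(x0, LSL, 2) becomes Operand(x0, UXTX, 2). This is useful for
// instruction forms (such as add/sub involving the stack pointer) that only
// accept the extended-register encoding.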
Operand Operand::ToExtendedRegister() const {
  DCHECK(IsShiftedRegister());
  DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}


Immediate Operand::immediate() const {
  DCHECK(IsImmediate());
  return immediate_;
}


int64_t Operand::ImmediateValue() const {
  DCHECK(IsImmediate());
  return immediate_.value();
}


Register Operand::reg() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}


Shift Operand::shift() const {
  DCHECK(IsShiftedRegister());
  return shift_;
}


Extend Operand::extend() const {
  DCHECK(IsExtendedRegister());
  return extend_;
}


unsigned Operand::shift_amount() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return shift_amount_;
}


Operand Operand::UntagSmi(Register smi) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  return Operand(smi, ASR, kSmiShift);
}

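// Folds the smi untag (an arithmetic shift right by kSmiShift) and a scaling
// shift into a single shift operand. For example, assuming the default 64-bit
// smi layout (kSmiShift == 32), UntagSmiAndScale(smi, 3) yields
// Operand(smi, ASR, 29).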
Operand Operand::UntagSmiAndScale(Register smi, int scale) {
  STATIC_ASSERT(kXRegSizeInBits == static_cast<unsigned>(kSmiShift +
                                                         kSmiValueSize));
  DCHECK(smi.Is64Bits());
  DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
  if (scale > kSmiShift) {
    return Operand(smi, LSL, scale - kSmiShift);
  } else if (scale < kSmiShift) {
    return Operand(smi, ASR, kSmiShift - scale);
  }
  return Operand(smi);
}


MemOperand::MemOperand()
  : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
    shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
}


MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
  : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
    shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
  DCHECK(base.Is64Bits() && !base.IsZero());
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
  : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
    shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(!regoffset.IsSP());
  DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
  : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
    shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
  DCHECK(shift == LSL);
}


MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
  : base_(base), addrmode_(addrmode) {
  DCHECK(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.ImmediateValue();

    regoffset_ = NoReg;
  } else if (offset.IsShiftedRegister()) {
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    shift_ = offset.shift();
    shift_amount_ = offset.shift_amount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
    DCHECK(shift_ == LSL);
  } else {
    DCHECK(offset.IsExtendedRegister());
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    extend_ = offset.extend();
    shift_amount_ = offset.shift_amount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    DCHECK(!regoffset_.IsSP());
    DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}

bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}


bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}


bool MemOperand::IsPreIndex() const {
  return addrmode_ == PreIndex;
}


bool MemOperand::IsPostIndex() const {
  return addrmode_ == PostIndex;
}

Operand MemOperand::OffsetAsOperand() const {
  if (IsImmediateOffset()) {
    return offset();
  } else {
    DCHECK(IsRegisterOffset());
    if (extend() == NO_EXTEND) {
      return Operand(regoffset(), shift(), shift_amount());
    } else {
      return Operand(regoffset(), extend(), shift_amount());
    }
  }
}


void Assembler::Unreachable() {
#ifdef USE_SIMULATOR
  debug("UNREACHABLE", __LINE__, BREAK);
#else
  // Crash by branching to 0. lr now points near the fault.
  Emit(BLR | Rn(xzr));
#endif
}


Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}


// Read/Modify the code target address in the branch/call instruction at pc.
// On ARM64 the target is not embedded in the instruction itself: it is loaded
// from a constant pool entry referenced by an ldr-literal at pc.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target, given the return address that
  // execution resumes at after the call.
  // Call sequence on ARM64 is:
  //  ldr ip0, #... @ load from literal pool
  //  blr ip0
  Address candidate = pc - 2 * kInstructionSize;
  Instruction* instr = reinterpret_cast<Instruction*>(candidate);
  USE(instr);
  DCHECK(instr->IsLdrLiteralX());
  return candidate;
}


Address Assembler::return_address_from_call_start(Address pc) {
  // The call, generated by MacroAssembler::Call, is one of two possible
  // sequences:
  //
  // Without relocation:
  //  movz  temp, #(target & 0x000000000000ffff)
  //  movk  temp, #(target & 0x00000000ffff0000)
  //  movk  temp, #(target & 0x0000ffff00000000)
  //  blr   temp
  //
  // With relocation:
  //  ldr   temp, =target
  //  blr   temp
  //
  // The return address is immediately after the blr instruction in both cases,
  // so it can be found by adding the call size to the address at the start of
  // the call sequence.
  STATIC_ASSERT(Assembler::kCallSizeWithoutRelocation == 4 * kInstructionSize);
  STATIC_ASSERT(Assembler::kCallSizeWithRelocation == 2 * kInstructionSize);

  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsMovz()) {
    // Verify the instruction sequence.
    DCHECK(instr->following(1)->IsMovk());
    DCHECK(instr->following(2)->IsMovk());
    DCHECK(instr->following(3)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithoutRelocation;
  } else {
    // Verify the instruction sequence.
    DCHECK(instr->IsLdrLiteralX());
    DCHECK(instr->following(1)->IsBranchAndLinkToRegister());
    return pc + Assembler::kCallSizeWithRelocation;
  }
}


void Assembler::deserialization_set_special_target_at(
    Isolate* isolate, Address constant_pool_entry, Code* code,
    Address target) {
  Memory::Address_at(constant_pool_entry) = target;
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  Memory::Address_at(pc) = target;
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc,
                                      Address constant_pool, Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Memory::Address_at(target_pointer_address_at(pc)) = target;
  // Intuitively, we would think it is necessary to always flush the
  // instruction cache after patching a target address in the code as follows:
  //   Assembler::FlushICache(isolate(), pc, sizeof(target));
  // However, an embedded constant of the form
  //   ldr   ip0, [pc, #...]
  // is patched by rewriting only its constant pool entry. The instruction
  // that loads from that address remains unchanged, so no flush is required.
}


void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_pointer_address_at(pc_);
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::target_pointer_address_at(pc_);
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}


Handle<Cell> RelocInfo::target_cell_handle() {
  UNIMPLEMENTED();
  Cell* null_cell = NULL;
  return Handle<Cell>(null_cell);
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  UNIMPLEMENTED();
}


static const int kNoCodeAgeSequenceLength = 5 * kInstructionSize;
static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on ARM64.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  // Read the stub entry point from the code age sequence.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  DCHECK(!Code::IsYoungSequence(stub->GetIsolate(), pc_));
  // Overwrite the stub entry point in the code age sequence. This is loaded as
  // a literal so there is no need to call FlushICache here.
  Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset;
  Memory::Address_at(stub_entry_address) = stub->instruction_start();
}


Address RelocInfo::debug_call_address() {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // For the above sequences the RelocInfo points to the load literal loading
  // the call address.
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  STATIC_ASSERT(Assembler::kPatchDebugBreakSlotAddressOffset == 0);
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}

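// The helpers below select the load/store opcode variant (w/x or s/d)
// matching a register's type and size.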
LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d : LDR_s;
  }
}


LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
                                         const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDP_x : LDP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDP_d : LDP_s;
  }
}


LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STR_d : STR_s;
  }
}


LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? STP_d : STP_s;
  }
}


LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
    DCHECK(rt.IsFPRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}

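// Label links are chained as byte offsets; branch emitters work in units of
// instructions, so convert here and check the alignment.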
int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  DCHECK(kStartOfLabelLinkChain == 0);
  int offset = LinkAndGetByteOffsetTo(label);
  DCHECK(IsAligned(offset, kInstructionSize));
  return offset >> kInstructionSizeLog2;
}


Instr Assembler::Flags(FlagsUpdate S) {
  if (S == SetFlags) {
    return 1 << FlagsUpdate_offset;
  } else if (S == LeaveFlags) {
    return 0 << FlagsUpdate_offset;
  }
  UNREACHABLE();
  return 0;
}


Instr Assembler::Cond(Condition cond) {
  return cond << Condition_offset;
}

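// PC-relative address immediates (adr/adrp) are split across the instruction:
// the low ImmPCRelLo_width bits are encoded in the immlo field and the
// remaining high bits in immhi.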
Instr Assembler::ImmPCRelAddress(int imm21) {
  CHECK(is_int21(imm21));
  Instr imm = static_cast<Instr>(truncate_to_int21(imm21));
  Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
  Instr immlo = imm << ImmPCRelLo_offset;
  return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
}


Instr Assembler::ImmUncondBranch(int imm26) {
  CHECK(is_int26(imm26));
  return truncate_to_int26(imm26) << ImmUncondBranch_offset;
}


Instr Assembler::ImmCondBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCondBranch_offset;
}


Instr Assembler::ImmCmpBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCmpBranch_offset;
}


Instr Assembler::ImmTestBranch(int imm14) {
  CHECK(is_int14(imm14));
  return truncate_to_int14(imm14) << ImmTestBranch_offset;
}


Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
  DCHECK(is_uint6(bit_pos));
  // Subtract five from the shift offset, as we need bit 5 from bit_pos.
  unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
  unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
  b5 &= ImmTestBranchBit5_mask;
  b40 &= ImmTestBranchBit40_mask;
  return b5 | b40;
}


Instr Assembler::SF(Register rd) {
  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
}

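// Add/sub immediates are 12 bits wide, optionally left-shifted by 12. For
// example, 0x5000 is encoded as imm12 == 0x5 with the shift bit set.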
Instr Assembler::ImmAddSub(int imm) {
  DCHECK(IsImmAddSub(imm));
  if (is_uint12(imm)) {  // No shift required.
    imm <<= ImmAddSub_offset;
  } else {
    imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
  }
  return imm;
}


Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
  USE(reg_size);
  return imms << ImmS_offset;
}


Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  DCHECK(is_uint6(immr));
  return immr << ImmR_offset;
}


Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(is_uint6(imms));
  DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
  USE(reg_size);
  return imms << ImmSetBits_offset;
}


Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  return immr << ImmRotate_offset;
}


Instr Assembler::ImmLLiteral(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmLLiteral_offset;
}


Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
  USE(reg_size);
  return bitn << BitN_offset;
}


Instr Assembler::ShiftDP(Shift shift) {
  DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
  return shift << ShiftDP_offset;
}


Instr Assembler::ImmDPShift(unsigned amount) {
  DCHECK(is_uint6(amount));
  return amount << ImmDPShift_offset;
}


Instr Assembler::ExtendMode(Extend extend) {
  return extend << ExtendMode_offset;
}


Instr Assembler::ImmExtendShift(unsigned left_shift) {
  DCHECK(left_shift <= 4);
  return left_shift << ImmExtendShift_offset;
}


Instr Assembler::ImmCondCmp(unsigned imm) {
  DCHECK(is_uint5(imm));
  return imm << ImmCondCmp_offset;
}


Instr Assembler::Nzcv(StatusFlags nzcv) {
  return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
}


Instr Assembler::ImmLSUnsigned(int imm12) {
  DCHECK(is_uint12(imm12));
  return imm12 << ImmLSUnsigned_offset;
}


Instr Assembler::ImmLS(int imm9) {
  DCHECK(is_int9(imm9));
  return truncate_to_int9(imm9) << ImmLS_offset;
}

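// Load/store pair offsets are scaled by the access size. For example, for a
// pair of X registers (size == 3), a byte offset of 16 is encoded as
// scaled_imm7 == 2.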
Instr Assembler::ImmLSPair(int imm7, LSDataSize size) {
  DCHECK(((imm7 >> size) << size) == imm7);
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
}


Instr Assembler::ImmShiftLS(unsigned shift_amount) {
  DCHECK(is_uint1(shift_amount));
  return shift_amount << ImmShiftLS_offset;
}


Instr Assembler::ImmException(int imm16) {
  DCHECK(is_uint16(imm16));
  return imm16 << ImmException_offset;
}


Instr Assembler::ImmSystemRegister(int imm15) {
  DCHECK(is_uint15(imm15));
  return imm15 << ImmSystemRegister_offset;
}


Instr Assembler::ImmHint(int imm7) {
  DCHECK(is_uint7(imm7));
  return imm7 << ImmHint_offset;
}


Instr Assembler::ImmBarrierDomain(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierDomain_offset;
}


Instr Assembler::ImmBarrierType(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierType_offset;
}

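// The access size is encoded in the most significant bits of a load/store
// opcode; the DCHECK below verifies that the size field extends to the top of
// the 32-bit instruction word.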
LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) {
  DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8));
  return static_cast<LSDataSize>(op >> SizeLS_offset);
}


Instr Assembler::ImmMoveWide(int imm) {
  DCHECK(is_uint16(imm));
  return imm << ImmMoveWide_offset;
}


Instr Assembler::ShiftMoveWide(int shift) {
  DCHECK(is_uint2(shift));
  return shift << ShiftMoveWide_offset;
}


Instr Assembler::FPType(FPRegister fd) {
  return fd.Is64Bits() ? FP64 : FP32;
}


Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));
  return scale << FPScale_offset;
}


const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
  return reg.Is64Bits() ? xzr : wzr;
}

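// kGap acts as a guard zone: growing the buffer while at least kGap bytes
// remain free should ensure a single emission cannot run off the end.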
inline void Assembler::CheckBufferSpace() {
  DCHECK(pc_ < (buffer_ + buffer_size_));
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}

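// Emission checkpoint: in addition to buffer space, this gives the veneer and
// constant pools a chance to be emitted once their check deadlines pass.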
inline void Assembler::CheckBuffer() {
  CheckBufferSpace();
  if (pc_offset() >= next_veneer_pool_check_) {
    CheckVeneerPool(false, true);
  }
  if (pc_offset() >= next_constant_pool_check_) {
    CheckConstPool(false, true);
  }
}


TypeFeedbackId Assembler::RecordedAstId() {
  DCHECK(!recorded_ast_id_.IsNone());
  return recorded_ast_id_;
}


void Assembler::ClearRecordedAstId() {
  recorded_ast_id_ = TypeFeedbackId::None();
}


}  // namespace internal
}  // namespace v8

#endif  // V8_ARM64_ASSEMBLER_ARM64_INL_H_