// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_CODE_STUBS_ARM64_H_
#define V8_ARM64_CODE_STUBS_ARM64_H_

namespace v8 {
namespace internal {


void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);


class StringHelper : public AllStatic {
 public:
  // Compares two flat one-byte strings and returns the result in x0.
  static void GenerateCompareFlatOneByteStrings(
      MacroAssembler* masm, Register left, Register right, Register scratch1,
      Register scratch2, Register scratch3, Register scratch4);

  // Compares two flat one-byte strings for equality and returns the result
  // in x0.
  static void GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                              Register left, Register right,
                                              Register scratch1,
                                              Register scratch2,
                                              Register scratch3);

 private:
  static void GenerateOneByteCharsCompareLoop(
      MacroAssembler* masm, Register left, Register right, Register length,
      Register scratch1, Register scratch2, Label* chars_not_equal);

  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
};


class StoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit StoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

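  // By convention, the caller stashes the lr value that the stub should
  // push in ip0 before making the call (the call itself clobbers lr).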
  static Register to_be_pushed_lr() { return ip0; }

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(StoreRegistersState, PlatformCodeStub);
};


class RestoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit RestoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(RestoreRegistersState, PlatformCodeStub);
};


class RecordWriteStub: public PlatformCodeStub {
 public:
  // Stub to record the write of 'value' at 'address' in 'object'.
  // Typically 'address' = 'object' + <some offset>.
  // See MacroAssembler::RecordWriteField() for an example.
  RecordWriteStub(Isolate* isolate,
                  Register object,
                  Register value,
                  Register address,
                  RememberedSetAction remembered_set_action,
                  SaveFPRegsMode fp_mode)
      : PlatformCodeStub(isolate),
        regs_(object,   // An input reg.
              address,  // An input reg.
              value) {  // One scratch reg.
    DCHECK(object.Is64Bits());
    DCHECK(value.Is64Bits());
    DCHECK(address.Is64Bits());
    minor_key_ = ObjectBits::encode(object.code()) |
                 ValueBits::encode(value.code()) |
                 AddressBits::encode(address.code()) |
                 RememberedSetActionBits::encode(remembered_set_action) |
                 SaveFPRegsModeBits::encode(fp_mode);
  }

  RecordWriteStub(uint32_t key, Isolate* isolate)
      : PlatformCodeStub(key, isolate), regs_(object(), address(), value()) {}

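  // A stub starts out in STORE_BUFFER_ONLY mode and is patched to
  // INCREMENTAL or INCREMENTAL_COMPACTION (and back) when incremental heap
  // marking starts and stops; see GetMode() and Patch() below.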
  enum Mode {
    STORE_BUFFER_ONLY,
    INCREMENTAL,
    INCREMENTAL_COMPACTION
  };

  bool SometimesSetsUpAFrame() override { return false; }

  static Mode GetMode(Code* stub) {
    // Find the mode depending on the first two instructions.
    Instruction* instr1 =
        reinterpret_cast<Instruction*>(stub->instruction_start());
    Instruction* instr2 = instr1->following();

    if (instr1->IsUncondBranchImm()) {
      DCHECK(instr2->IsPCRelAddressing() && (instr2->Rd() == xzr.code()));
      return INCREMENTAL;
    }

    DCHECK(instr1->IsPCRelAddressing() && (instr1->Rd() == xzr.code()));

    if (instr2->IsUncondBranchImm()) {
      return INCREMENTAL_COMPACTION;
    }

    DCHECK(instr2->IsPCRelAddressing());

    return STORE_BUFFER_ONLY;
  }

  // We patch the first two instructions of the stub back and forth between an
  // adr and a branch when we start and stop incremental heap marking.
  // The branch is
  //   b label
  // The adr is
  //   adr xzr, label
  // so it is effectively a nop.
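  // The resulting instruction pairs encode the mode (see GetMode() above):
  //   adr ; adr  ->  STORE_BUFFER_ONLY
  //   b   ; adr  ->  INCREMENTAL
  //   adr ; b    ->  INCREMENTAL_COMPACTION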
  static void Patch(Code* stub, Mode mode) {
    // We are going to patch the first two instructions of the stub.
    PatchingAssembler patcher(
        stub->GetIsolate(),
        reinterpret_cast<Instruction*>(stub->instruction_start()), 2);
    Instruction* instr1 = patcher.InstructionAt(0);
    Instruction* instr2 = patcher.InstructionAt(kInstructionSize);
    // Instructions must be either 'adr' or 'b'.
    DCHECK(instr1->IsPCRelAddressing() || instr1->IsUncondBranchImm());
    DCHECK(instr2->IsPCRelAddressing() || instr2->IsUncondBranchImm());
    // Retrieve the offsets to the labels.
    auto offset_to_incremental_noncompacting =
        static_cast<int32_t>(instr1->ImmPCOffset());
    auto offset_to_incremental_compacting =
        static_cast<int32_t>(instr2->ImmPCOffset());

    switch (mode) {
      case STORE_BUFFER_ONLY:
        DCHECK(GetMode(stub) == INCREMENTAL ||
               GetMode(stub) == INCREMENTAL_COMPACTION);
        patcher.adr(xzr, offset_to_incremental_noncompacting);
        patcher.adr(xzr, offset_to_incremental_compacting);
        break;
      case INCREMENTAL:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        patcher.b(offset_to_incremental_noncompacting >> kInstructionSizeLog2);
        patcher.adr(xzr, offset_to_incremental_compacting);
        break;
      case INCREMENTAL_COMPACTION:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        patcher.adr(xzr, offset_to_incremental_noncompacting);
        patcher.b(offset_to_incremental_compacting >> kInstructionSizeLog2);
        break;
    }
    DCHECK(GetMode(stub) == mode);
  }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();

 private:
  // This is a helper class to manage the registers associated with the stub.
  // The 'object' and 'address' registers must be preserved.
  class RegisterAllocation {
   public:
    RegisterAllocation(Register object,
                       Register address,
                       Register scratch)
        : object_(object),
          address_(address),
          scratch0_(scratch),
          saved_regs_(kCallerSaved),
          saved_fp_regs_(kCallerSavedFP) {
      DCHECK(!AreAliased(scratch, object, address));

      // The SaveCallerSaveRegisters method needs to save caller-saved
      // registers, but we don't bother saving MacroAssembler scratch
      // registers.
      saved_regs_.Remove(MacroAssembler::DefaultTmpList());
      saved_fp_regs_.Remove(MacroAssembler::DefaultFPTmpList());

      // We would like to require more scratch registers for this stub,
      // but the number of registers comes down to the ones used in
      // FullCodeGen::SetVar(), which is architecture independent.
      // We allocate 2 extra scratch registers that we'll save on the stack.
      CPURegList pool_available = GetValidRegistersForAllocation();
      CPURegList used_regs(object, address, scratch);
      pool_available.Remove(used_regs);
      scratch1_ = Register(pool_available.PopLowestIndex());
      scratch2_ = Register(pool_available.PopLowestIndex());

      // The scratch registers will be restored by other means so we don't need
      // to save them with the other caller-saved registers.
      saved_regs_.Remove(scratch0_);
      saved_regs_.Remove(scratch1_);
      saved_regs_.Remove(scratch2_);
    }

    void Save(MacroAssembler* masm) {
      // We don't have to save scratch0_ because it was given to us as
      // a scratch register.
      masm->Push(scratch1_, scratch2_);
    }

    void Restore(MacroAssembler* masm) {
      masm->Pop(scratch2_, scratch1_);
    }

    // If we have to call into C then we need to save and restore all caller-
    // saved registers that were not already preserved.
    void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
      // TODO(all): This can be very expensive, and it is likely that not every
      // register will need to be preserved. Can we improve this?
      masm->PushCPURegList(saved_regs_);
      if (mode == kSaveFPRegs) {
        masm->PushCPURegList(saved_fp_regs_);
      }
    }

    void RestoreCallerSaveRegisters(MacroAssembler* masm,
                                    SaveFPRegsMode mode) {
      // TODO(all): This can be very expensive, and it is likely that not every
      // register will need to be preserved. Can we improve this?
      if (mode == kSaveFPRegs) {
        masm->PopCPURegList(saved_fp_regs_);
      }
      masm->PopCPURegList(saved_regs_);
    }

    Register object() { return object_; }
    Register address() { return address_; }
    Register scratch0() { return scratch0_; }
    Register scratch1() { return scratch1_; }
    Register scratch2() { return scratch2_; }

   private:
    Register object_;
    Register address_;
    Register scratch0_;
    Register scratch1_;
    Register scratch2_;
    CPURegList saved_regs_;
    CPURegList saved_fp_regs_;

    // TODO(all): We should consider moving this somewhere else.
    static CPURegList GetValidRegistersForAllocation() {
      // The list of valid registers for allocation is defined as all the
      // registers without those with a special meaning.
      //
      // The default list excludes registers x26 to x31 because they are
      // reserved for the following purposes:
      //  - x26 root register
      //  - x27 context pointer register
      //  - x28 jssp
      //  - x29 frame pointer
      //  - x30 link register (lr)
      //  - x31 xzr/stack pointer
      CPURegList list(CPURegister::kRegister, kXRegSizeInBits, 0, 25);

      // We also remove MacroAssembler's scratch registers.
      list.Remove(MacroAssembler::DefaultTmpList());

      return list;
    }

    friend class RecordWriteStub;
  };

  enum OnNoNeedToInformIncrementalMarker {
    kReturnOnNoNeedToInformIncrementalMarker,
    kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
  };

  inline Major MajorKey() const final { return RecordWrite; }

  void Generate(MacroAssembler* masm) override;
  void GenerateIncremental(MacroAssembler* masm, Mode mode);
  void CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode);
  void InformIncrementalMarker(MacroAssembler* masm);

  void Activate(Code* code) override {
    code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
  }

  Register object() const {
    return Register::from_code(ObjectBits::decode(minor_key_));
  }

  Register value() const {
    return Register::from_code(ValueBits::decode(minor_key_));
  }

  Register address() const {
    return Register::from_code(AddressBits::decode(minor_key_));
  }

  RememberedSetAction remembered_set_action() const {
    return RememberedSetActionBits::decode(minor_key_);
  }

  SaveFPRegsMode save_fp_regs_mode() const {
    return SaveFPRegsModeBits::decode(minor_key_);
  }

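  // Layout of minor_key_:
  //   [0..4]   object register code
  //   [5..9]   value register code
  //   [10..14] address register code
  //   [15]     RememberedSetAction
  //   [16]     SaveFPRegsMode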
  class ObjectBits: public BitField<int, 0, 5> {};
  class ValueBits: public BitField<int, 5, 5> {};
  class AddressBits: public BitField<int, 10, 5> {};
  class RememberedSetActionBits: public BitField<RememberedSetAction, 15, 1> {};
  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 16, 1> {};

  Label slow_;
  RegisterAllocation regs_;
};


// Helper to call C++ functions from generated code. The caller must prepare
// the exit frame before doing the call with GenerateCall.
class DirectCEntryStub: public PlatformCodeStub {
 public:
  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
  void GenerateCall(MacroAssembler* masm, Register target);

 private:
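  // While the C++ call is in progress, the return address into this stub is
  // held in a location the GC does not update, so the stub's code must not
  // move.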
  bool NeedsImmovableCode() override { return true; }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub);
};


class NameDictionaryLookupStub: public PlatformCodeStub {
 public:
  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };

  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
      : PlatformCodeStub(isolate) {
    minor_key_ = LookupModeBits::encode(mode);
  }

  static void GenerateNegativeLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register receiver,
                                     Register properties,
                                     Handle<Name> name,
                                     Register scratch0);

  bool SometimesSetsUpAFrame() override { return false; }

 private:
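  // GenerateNegativeLookup() emits kInlinedProbes probes inline and falls
  // back to this stub, which probes up to kTotalProbes times before giving
  // up.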
  static const int kInlinedProbes = 4;
  static const int kTotalProbes = 20;

  static const int kCapacityOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kCapacityIndex * kPointerSize;

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;

  LookupMode mode() const { return LookupModeBits::decode(minor_key_); }

  class LookupModeBits: public BitField<LookupMode, 0, 1> {};

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_ARM64_CODE_STUBS_ARM64_H_