// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#ifndef V8_MIPS_CODE_STUBS_MIPS64_H_
6#define V8_MIPS_CODE_STUBS_MIPS64_H_
7
8#include "src/mips64/frames-mips64.h"
9
10namespace v8 {
11namespace internal {
12
13
// Emits the native fast-path code for Array construction; jumps to
// |call_generic_code| when the fast path cannot be taken (defined in the
// corresponding code-stubs-mips64.cc -- TODO confirm exact fallback
// conditions there).
void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
15
16
17class StringHelper : public AllStatic {
18 public:
19  // Generate code for copying a large number of characters. This function
20  // is allowed to spend extra time setting up conditions to make copying
21  // faster. Copying of overlapping regions is not supported.
22  // Dest register ends at the position after the last character written.
23  static void GenerateCopyCharacters(MacroAssembler* masm,
24                                     Register dest,
25                                     Register src,
26                                     Register count,
27                                     Register scratch,
28                                     String::Encoding encoding);
29
30  // Compares two flat one-byte strings and returns result in v0.
31  static void GenerateCompareFlatOneByteStrings(
32      MacroAssembler* masm, Register left, Register right, Register scratch1,
33      Register scratch2, Register scratch3, Register scratch4);
34
35  // Compares two flat one-byte strings for equality and returns result in v0.
36  static void GenerateFlatOneByteStringEquals(MacroAssembler* masm,
37                                              Register left, Register right,
38                                              Register scratch1,
39                                              Register scratch2,
40                                              Register scratch3);
41
42 private:
43  static void GenerateOneByteCharsCompareLoop(
44      MacroAssembler* masm, Register left, Register right, Register length,
45      Register scratch1, Register scratch2, Register scratch3,
46      Label* chars_not_equal);
47
48 private:
49  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
50};
51
52
// Platform stub that saves register state (presumably paired with
// RestoreRegistersStateStub below -- verify against callers). Generated
// ahead of time so it is available without runtime compilation.
class StoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit StoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  // Pre-generates this stub's code for |isolate|.
  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(StoreRegistersState, PlatformCodeStub);
};
64
65
// Platform stub that restores register state (presumably the counterpart
// of StoreRegistersStateStub above -- verify against callers). Generated
// ahead of time so it is available without runtime compilation.
class RestoreRegistersStateStub: public PlatformCodeStub {
 public:
  explicit RestoreRegistersStateStub(Isolate* isolate)
      : PlatformCodeStub(isolate) {}

  // Pre-generates this stub's code for |isolate|.
  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(RestoreRegistersState, PlatformCodeStub);
};
77
78
// Stub used by the write barrier. Its generated code starts with two
// branch/nop instruction slots that are patched in place (see GetMode /
// Patch below) to switch the stub between store-buffer-only operation and
// the two incremental-marking modes without regenerating code.
class RecordWriteStub: public PlatformCodeStub {
 public:
  RecordWriteStub(Isolate* isolate,
                  Register object,
                  Register value,
                  Register address,
                  RememberedSetAction remembered_set_action,
                  SaveFPRegsMode fp_mode)
      : PlatformCodeStub(isolate),
        regs_(object,   // An input reg.
              address,  // An input reg.
              value) {  // One scratch reg.
    // All five parameters are packed into minor_key_ so an identical stub
    // can be found in (or recreated from) the code cache.
    minor_key_ = ObjectBits::encode(object.code()) |
                 ValueBits::encode(value.code()) |
                 AddressBits::encode(address.code()) |
                 RememberedSetActionBits::encode(remembered_set_action) |
                 SaveFPRegsModeBits::encode(fp_mode);
  }

  // Reconstructs a stub from a previously encoded minor key; the register
  // accessors below decode object/address/value back out of the key.
  RecordWriteStub(uint32_t key, Isolate* isolate)
      : PlatformCodeStub(key, isolate), regs_(object(), address(), value()) {}

  enum Mode {
    STORE_BUFFER_ONLY,
    INCREMENTAL,
    INCREMENTAL_COMPACTION
  };

  bool SometimesSetsUpAFrame() override { return false; }

  // Rewrites the instruction at |pos| as `bne zero_reg, zero_reg, offset`.
  // Since zero_reg != zero_reg is never true, the branch is never taken,
  // making it an effective nop while preserving the branch offset bits.
  static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) |
        (zero_reg.code() << kRtShift) | (offset & kImm16Mask));
    DCHECK(Assembler::IsBne(masm->instr_at(pos)));
  }

  // Rewrites the instruction at |pos| as `beq zero_reg, zero_reg, offset`,
  // i.e. an always-taken branch, reusing the offset already encoded there.
  static void PatchNopIntoBranch(MacroAssembler* masm, int pos) {
    const unsigned offset = masm->instr_at(pos) & kImm16Mask;
    masm->instr_at_put(pos, BEQ | (zero_reg.code() << kRsShift) |
        (zero_reg.code() << kRtShift) | (offset & kImm16Mask));
    DCHECK(Assembler::IsBeq(masm->instr_at(pos)));
  }

  // Decodes the current mode from the first and third instructions of the
  // stub (the second slot is presumably a branch delay slot -- the code
  // reads at offsets 0 and 2 * kInstrSize). beq = branch taken = that
  // incremental path is active; bne = nop = inactive.
  static Mode GetMode(Code* stub) {
    Instr first_instruction = Assembler::instr_at(stub->instruction_start());
    Instr second_instruction = Assembler::instr_at(stub->instruction_start() +
                                                   2 * Assembler::kInstrSize);

    if (Assembler::IsBeq(first_instruction)) {
      return INCREMENTAL;
    }

    DCHECK(Assembler::IsBne(first_instruction));

    if (Assembler::IsBeq(second_instruction)) {
      return INCREMENTAL_COMPACTION;
    }

    DCHECK(Assembler::IsBne(second_instruction));

    return STORE_BUFFER_ONLY;
  }

  // Transitions the already-generated stub to |mode| by patching its first
  // instructions in place, then flushes the instruction cache over the
  // patched region so the new encoding is observed.
  static void Patch(Code* stub, Mode mode) {
    MacroAssembler masm(stub->GetIsolate(), stub->instruction_start(),
                        stub->instruction_size(), CodeObjectRequired::kNo);
    switch (mode) {
      case STORE_BUFFER_ONLY:
        DCHECK(GetMode(stub) == INCREMENTAL ||
               GetMode(stub) == INCREMENTAL_COMPACTION);
        // Disable both incremental entry branches.
        PatchBranchIntoNop(&masm, 0);
        PatchBranchIntoNop(&masm, 2 * Assembler::kInstrSize);
        break;
      case INCREMENTAL:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 0);
        break;
      case INCREMENTAL_COMPACTION:
        DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
        PatchNopIntoBranch(&masm, 2 * Assembler::kInstrSize);
        break;
    }
    DCHECK(GetMode(stub) == mode);
    Assembler::FlushICache(stub->GetIsolate(), stub->instruction_start(),
                           4 * Assembler::kInstrSize);
  }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();

 private:
  // This is a helper class for freeing up 3 scratch registers.  The input is
  // two registers that must be preserved and one scratch register provided by
  // the caller.
  class RegisterAllocation {
   public:
    RegisterAllocation(Register object,
                       Register address,
                       Register scratch0)
        : object_(object),
          address_(address),
          scratch0_(scratch0) {
      DCHECK(!AreAliased(scratch0, object, address, no_reg));
      // Pick a second scratch register distinct from all three inputs.
      scratch1_ = GetRegisterThatIsNotOneOf(object_, address_, scratch0_);
    }

    void Save(MacroAssembler* masm) {
      DCHECK(!AreAliased(object_, address_, scratch1_, scratch0_));
      // We don't have to save scratch0_ because it was given to us as
      // a scratch register.
      masm->push(scratch1_);
    }

    void Restore(MacroAssembler* masm) {
      masm->pop(scratch1_);
    }

    // If we have to call into C then we need to save and restore all caller-
    // saved registers that were not already preserved.  The scratch registers
    // will be restored by other means so we don't bother pushing them here.
    void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
      masm->MultiPush((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
      if (mode == kSaveFPRegs) {
        masm->MultiPushFPU(kCallerSavedFPU);
      }
    }

    // Reverse of SaveCallerSaveRegisters: pops in the opposite order
    // (FPU registers first, then the GP register set).
    inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
                                           SaveFPRegsMode mode) {
      if (mode == kSaveFPRegs) {
        masm->MultiPopFPU(kCallerSavedFPU);
      }
      masm->MultiPop((kJSCallerSaved | ra.bit()) & ~scratch1_.bit());
    }

    inline Register object() { return object_; }
    inline Register address() { return address_; }
    inline Register scratch0() { return scratch0_; }
    inline Register scratch1() { return scratch1_; }

   private:
    Register object_;
    Register address_;
    Register scratch0_;
    Register scratch1_;

    friend class RecordWriteStub;
  };

  enum OnNoNeedToInformIncrementalMarker {
    kReturnOnNoNeedToInformIncrementalMarker,
    kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
  };

  inline Major MajorKey() const final { return RecordWrite; }

  void Generate(MacroAssembler* masm) override;
  void GenerateIncremental(MacroAssembler* masm, Mode mode);
  void CheckNeedsToInformIncrementalMarker(
      MacroAssembler* masm,
      OnNoNeedToInformIncrementalMarker on_no_need,
      Mode mode);
  void InformIncrementalMarker(MacroAssembler* masm);

  // Called when the stub's code object is installed; registers it with
  // incremental marking so it can be patched (see Patch above).
  void Activate(Code* code) override {
    code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
  }

  // Accessors decoding the constructor parameters back out of minor_key_.
  Register object() const {
    return Register::from_code(ObjectBits::decode(minor_key_));
  }

  Register value() const {
    return Register::from_code(ValueBits::decode(minor_key_));
  }

  Register address() const {
    return Register::from_code(AddressBits::decode(minor_key_));
  }

  RememberedSetAction remembered_set_action() const {
    return RememberedSetActionBits::decode(minor_key_);
  }

  SaveFPRegsMode save_fp_regs_mode() const {
    return SaveFPRegsModeBits::decode(minor_key_);
  }

  // Bit layout of minor_key_: three 5-bit register codes plus two 1-bit
  // mode flags (bits 0..16).
  class ObjectBits: public BitField<int, 0, 5> {};
  class ValueBits: public BitField<int, 5, 5> {};
  class AddressBits: public BitField<int, 10, 5> {};
  class RememberedSetActionBits: public BitField<RememberedSetAction, 15, 1> {};
  class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 16, 1> {};

  Label slow_;
  RegisterAllocation regs_;

  DISALLOW_COPY_AND_ASSIGN(RecordWriteStub);
};
278
279
280// Trampoline stub to call into native code. To call safely into native code
281// in the presence of compacting GC (which can move code objects) we need to
282// keep the code which called into native pinned in the memory. Currently the
283// simplest approach is to generate such stub early enough so it can never be
284// moved by GC
class DirectCEntryStub: public PlatformCodeStub {
 public:
  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
  // Emits a call through this trampoline to the native code address held
  // in |target|.
  void GenerateCall(MacroAssembler* masm, Register target);

 private:
  // Must not be moved by the GC -- the trampoline is what keeps native
  // calls safe under compaction (see the class comment above).
  bool NeedsImmovableCode() override { return true; }

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub);
};
296
297
// Stub that probes a NameDictionary for a key. The lookup mode (packed
// into minor_key_) selects whether the stub proves presence (POSITIVE) or
// absence (NEGATIVE) of the name.
class NameDictionaryLookupStub: public PlatformCodeStub {
 public:
  enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };

  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
      : PlatformCodeStub(isolate) {
    minor_key_ = LookupModeBits::encode(mode);
  }

  // Emits an inline negative lookup for the fixed |name| in |properties|;
  // jumps to |done| on proven absence, |miss| otherwise -- TODO confirm
  // exact jump semantics against the .cc definition.
  static void GenerateNegativeLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register receiver,
                                     Register properties,
                                     Handle<Name> name,
                                     Register scratch0);

  // Emits an inline positive lookup for the runtime |name| register in
  // |elements|; r0/r1 are scratch registers despite the arm-like names.
  static void GeneratePositiveLookup(MacroAssembler* masm,
                                     Label* miss,
                                     Label* done,
                                     Register elements,
                                     Register name,
                                     Register r0,
                                     Register r1);

  bool SometimesSetsUpAFrame() override { return false; }

 private:
  // Number of probes emitted inline before falling back to the full stub,
  // and the total probe budget inside the stub.
  static const int kInlinedProbes = 4;
  static const int kTotalProbes = 20;

  // Byte offsets of the capacity and first-element slots inside a
  // NameDictionary's backing store.
  static const int kCapacityOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kCapacityIndex * kPointerSize;

  static const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;

  LookupMode mode() const { return LookupModeBits::decode(minor_key_); }

  class LookupModeBits: public BitField<LookupMode, 0, 1> {};

  DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
  DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
};
344
345
346}  // namespace internal
347}  // namespace v8
348
349#endif  // V8_MIPS_CODE_STUBS_MIPS64_H_
350