// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "src/mips64/assembler-mips64.h"

#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {


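// Crankshaft requires an FPU; 128-bit SIMD is not supported by this port.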
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(FPU); }

bool CpuFeatures::SupportsSimd128() { return false; }

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int64_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm64_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<int64_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm64_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE32;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}


// -----------------------------------------------------------------------------
// RelocInfo.

void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(rmode_, p, delta);
    Assembler::FlushICache(isolate_, p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) ||
         rmode_ == EMBEDDED_OBJECT ||
         rmode_ == EXTERNAL_REFERENCE);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like LUI/ORI where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written. In this case the target_address_address function should
  // return the end of the instructions to be patched, allowing the
  // deserializer to deserialize the instructions as raw bytes and put them in
  // place, ready to be patched with the target. After jump optimization,
  // that is the address of the instruction that follows J/JAL/JR/JALR
  // instruction.
  // return reinterpret_cast<Address>(
  //  pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
  return reinterpret_cast<Address>(
    pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
}


Address RelocInfo::constant_pool_entry_address() {
  UNREACHABLE();
  return NULL;
}


int RelocInfo::target_address_size() {
  return Assembler::kSpecialTargetSize;
}

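// Wrappers that resolve the constant pool pointer from the owning Code object
// before delegating to the (pc, constant_pool) overloads.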
Address Assembler::target_address_at(Address pc, Code* code) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  return target_address_at(pc, constant_pool);
}

void Assembler::set_target_address_at(Isolate* isolate, Address pc, Code* code,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Address constant_pool = code ? code->constant_pool() : NULL;
  set_target_address_at(isolate, pc, constant_pool, target, icache_flush_mode);
}

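// The call target address is encoded kCallTargetAddressOffset bytes before
// the return address of a call sequence.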
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


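// Patch the 26-bit jump field of a j/jal instruction so that it targets
// |target|. Only the low 28 bits of the address are encoded; the upper bits
// come from the address of the jump itself when it executes.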
void Assembler::set_target_internal_reference_encoded_at(Address pc,
                                                         Address target) {
  // Encoded internal references are j/jal instructions.
  Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

  uint64_t imm28 =
      (reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));

  instr &= ~kImm26Mask;
  uint64_t imm26 = imm28 >> 2;
  DCHECK(is_uint26(imm26));

  instr_at_put(pc, instr | (imm26 & kImm26Mask));
  // Currently used only by deserializer, and all code will be flushed
  // after complete deserialization, no need to flush on each reference.
}


void Assembler::deserialization_set_target_internal_reference_at(
    Isolate* isolate, Address pc, Address target, RelocInfo::Mode mode) {
  if (mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    DCHECK(IsJ(instr_at(pc)));
    set_target_internal_reference_encoded_at(pc, target);
  } else {
    DCHECK(mode == RelocInfo::INTERNAL_REFERENCE);
    Memory::Address_at(pc) = target;
  }
}


Object* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_, host_)));
}


void RelocInfo::set_target_object(Object* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(isolate_, pc_, host_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target));
    host()->GetHeap()->RecordWriteIntoCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, host_);
}


Address RelocInfo::target_internal_reference() {
  if (rmode_ == INTERNAL_REFERENCE) {
    return Memory::Address_at(pc_);
  } else {
    // Encoded internal references are j/jal instructions.
    DCHECK(rmode_ == INTERNAL_REFERENCE_ENCODED);
    Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
    instr &= kImm26Mask;
    uint64_t imm28 = instr << 2;
    uint64_t segment =
        (reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
    return reinterpret_cast<Address>(segment | imm28);
  }
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
  return reinterpret_cast<Address>(pc_);
}


Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}


void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}


Handle<Cell> RelocInfo::target_cell_handle() {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}


Cell* RelocInfo::target_cell() {
  DCHECK(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(Cell* cell,
                                WriteBarrierMode write_barrier_mode,
                                ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CELL);
  Address address = cell->address() + Cell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(host(), this,
                                                                  cell);
  }
}


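// The code-age sequence is nine instructions long on this port.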
static const int kNoCodeAgeSequenceLength = 9 * Assembler::kInstrSize;


Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  UNREACHABLE();  // This should never be reached on MIPS.
  return Handle<Object>();
}


Code* RelocInfo::code_age_stub() {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + Assembler::kInstrSize, host_));
}


void RelocInfo::set_code_age_stub(Code* stub,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  Assembler::set_target_address_at(isolate_, pc_ + Assembler::kInstrSize, host_,
                                   stub->instruction_start());
}


Address RelocInfo::debug_call_address() {
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  return Assembler::target_address_at(pc_, host_);
}


void RelocInfo::set_debug_call_address(Address target) {
  DCHECK(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence());
  // The pc_ offset of 0 assumes patched debug break slot or return
  // sequence.
  Assembler::set_target_address_at(isolate_, pc_, host_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


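// Null out the target of this relocation entry, according to its mode.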
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory::Address_at(pc_) = NULL;
  } else if (IsInternalReferenceEncoded(rmode_)) {
    Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
  } else {
    Assembler::set_target_address_at(isolate_, pc_, host_, NULL);
  }
}

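// Forward this relocation entry to the matching ObjectVisitor callback.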
template <typename ObjectVisitor>
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    visitor->VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


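// Same dispatch as above, but through a static visitor class.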
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE ||
             mode == RelocInfo::INTERNAL_REFERENCE_ENCODED) {
    StaticVisitor::VisitInternalReference(this);
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (RelocInfo::IsDebugBreakSlot(mode) &&
             IsPatchedDebugBreakSlotSequence()) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


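// Grow the instruction buffer when less than kGap bytes of space remain.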
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


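// Emit the trampoline pool if the current position, allowing headroom for
// |extra_instructions|, has reached the next scheduled check point.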
void Assembler::CheckTrampolinePoolQuick(int extra_instructions) {
  if (pc_offset() >= next_buffer_check_ - extra_instructions * kInstrSize) {
    CheckTrampolinePool();
  }
}


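// If the previous instruction was a compact branch, fill its forbidden slot
// with a nop before anything other than an instruction is emitted.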
void Assembler::CheckForEmitInForbiddenSlot() {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  if (IsPrevInstrCompactBranch()) {
    // Nop instruction to precede a CTI in forbidden slot:
    Instr nop = SPECIAL | SLL;
    *reinterpret_cast<Instr*>(pc_) = nop;
    pc_ += kInstrSize;

    ClearCompactBranchState();
  }
}


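// Emit one instruction. If the previous instruction was a compact branch and
// |x| may not occupy its forbidden slot, a nop is emitted first.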
void Assembler::EmitHelper(Instr x, CompactBranchType is_compact_branch) {
  if (IsPrevInstrCompactBranch()) {
    if (Instruction::IsForbiddenAfterBranchInstr(x)) {
      // Nop instruction to precede a CTI in forbidden slot:
      Instr nop = SPECIAL | SLL;
      *reinterpret_cast<Instr*>(pc_) = nop;
      pc_ += kInstrSize;
    }
    ClearCompactBranchState();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  if (is_compact_branch == CompactBranchType::COMPACT_BRANCH) {
    EmittedCompactBranchInstruction();
  }
  CheckTrampolinePoolQuick();
}

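// Byte emission is specialized below: the trampoline-pool check only runs
// once pc_ reaches an instruction boundary again.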
template <>
inline void Assembler::EmitHelper(uint8_t x);

template <typename T>
void Assembler::EmitHelper(T x) {
  *reinterpret_cast<T*>(pc_) = x;
  pc_ += sizeof(x);
  CheckTrampolinePoolQuick();
}

template <>
void Assembler::EmitHelper(uint8_t x) {
  *reinterpret_cast<uint8_t*>(pc_) = x;
  pc_ += sizeof(x);
  if (reinterpret_cast<intptr_t>(pc_) % kInstrSize == 0) {
    CheckTrampolinePoolQuick();
  }
}

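// Public entry point for emitting an instruction; grows the buffer first
// unless buffer growth is currently blocked.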
void Assembler::emit(Instr x, CompactBranchType is_compact_branch) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  EmitHelper(x, is_compact_branch);
}


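// Emit a raw 64-bit data word. Data must never land in a compact branch's
// forbidden slot, so that slot is padded with a nop first if necessary.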
void Assembler::emit(uint64_t data) {
  CheckForEmitInForbiddenSlot();
  EmitHelper(data);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_