// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license has been
// modified significantly by Google Inc.
// Copyright 2011 the V8 project authors. All rights reserved.


#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_

#include "mips/assembler-mips.h"
#include "cpu.h"
#include "debug.h"


namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Operand and MemOperand.

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  rm_ = no_reg;
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  rm_ = no_reg;
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE;
}


Operand::Operand(Register rm) {
  rm_ = rm;
}


bool Operand::is_reg() const {
  return rm_.is_valid();
}

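// Usage sketch (illustrative only; t0 and the Operand declaration come from
// assembler-mips.h): an Operand wraps either a register or a 32-bit
// immediate, and is_reg() tells the two apart.
//
//   Operand imm(0x1234);  // Immediate operand: is_reg() returns false.
//   Operand reg(t0);      // Register operand:  is_reg() returns true.
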
// -----------------------------------------------------------------------------
// RelocInfo.

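// Relocates the target of the instruction at pc_ by delta. On MIPS, j/jal
// can only reach addresses within the current 256 MB (28-bit) region, so a
// code target that no longer shares the upper address bits with pc_ must be
// rewritten as a jump through a register.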
void RelocInfo::apply(intptr_t delta) {
  if (IsCodeTarget(rmode_)) {
    uint32_t scope1 = reinterpret_cast<uint32_t>(target_address()) & ~kImm28Mask;
    uint32_t scope2 = reinterpret_cast<uint32_t>(pc_) & ~kImm28Mask;

    if (scope1 != scope2) {
      Assembler::JumpLabelToJumpRegister(pc_);
    }
  }
  if (IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    byte* p = reinterpret_cast<byte*>(pc_);
    int count = Assembler::RelocateInternalReference(p, delta);
    CPU::FlushICache(p, count * sizeof(uint32_t));
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  return Assembler::kExternalTargetSize;
}

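// Patches the target address encoded at pc_ and, with UPDATE_WRITE_BARRIER,
// tells the incremental marker that the host code object now embeds a
// pointer to the new target code.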
void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Handle<Object>(reinterpret_cast<Object**>(
      Assembler::target_address_at(pc_)));
}


Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be dereferenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_address_at(pc_));
  return &reconstructed_obj_ptr_;
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}


Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}

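// The instruction stream stores the address of the cell's value slot, so the
// cell itself is recovered by stepping back kValueOffset bytes.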
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}


void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550): We are passing NULL as a slot because a cell can never be
    // on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes a MIPS patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or a
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  return Assembler::target_address_at(pc_);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  // The pc_ offset of 0 assumes a MIPS patched return sequence per
  // debug-mips.cc BreakLocationIterator::SetDebugBreakAtReturn(), or a
  // debug break slot per BreakLocationIterator::SetDebugBreakAtSlot().
  Assembler::set_target_address_at(pc_, target);
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}

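// A patched return site holds a lui/ori pair that materializes a 32-bit
// address, followed by either a jal or a jalr (a SPECIAL-opcode instruction
// with the JALR function field). Anything else means the site is unpatched.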
bool RelocInfo::IsPatchedReturnSequence() {
  Instr instr0 = Assembler::instr_at(pc_);
  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
  Instr instr2 = Assembler::instr_at(pc_ + 2 * Assembler::kInstrSize);
  bool patched_return = ((instr0 & kOpcodeMask) == LUI &&
                         (instr1 & kOpcodeMask) == ORI &&
                         ((instr2 & kOpcodeMask) == JAL ||
                          ((instr2 & kOpcodeMask) == SPECIAL &&
                           (instr2 & kFunctionFieldMask) == JALR)));
  return patched_return;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  Instr current_instr = Assembler::instr_at(pc_);
  return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
}

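// Dispatches on the relocation mode: embedded objects, code targets, global
// property cells, external references, patched debug-break sites, and
// runtime entries each go to the corresponding visitor callback.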
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Assembler.


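// Grows the buffer when free space falls to the reserved gap, so emit()
// always has room to write one more instruction.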
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
}


void Assembler::CheckTrampolinePoolQuick() {
  if (pc_offset() >= next_buffer_check_) {
    CheckTrampolinePool();
  }
}


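// Writes one 4-byte instruction at pc_. The buffer check is skipped while
// buffer growth is blocked (callers must then reserve space up front), and
// every emitted instruction may trigger a trampoline pool check.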
void Assembler::emit(Instr x) {
  if (!is_buffer_growth_blocked()) {
    CheckBuffer();
  }
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
  CheckTrampolinePoolQuick();
}


} }  // namespace v8::internal

#endif  // V8_MIPS_ASSEMBLER_MIPS_INL_H_