// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler


void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}


void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  RecordRelocInfo(rmode);
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}
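
// Note on emit_code_target above: the 32-bit payload written by emitl is an
// index into code_targets_, not an address. Illustrative example (the
// handles A and B are hypothetical): if code_targets_ already holds [A, B]
// and the next target is identical to B, current == 2 and index 1 is
// re-emitted instead of growing the list.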


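// The REX prefix has the layout 0100WRXB: W selects 64-bit operand size, R
// extends the ModR/M reg field, X the SIB index field, and B the ModR/M rm /
// SIB base field. Worked example (illustrative, not from the original
// source): for reg = r9 (high bit 1) and rm_reg = rbx (high bit 0),
// emit_rex_64 below emits 0x48 | 1 << 2 | 0 == 0x4C.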
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


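// The emit_optional_rex_32 variants below emit a REX prefix only when an
// operand actually needs an extension bit (r8-r15 or xmm8-xmm15); for the
// plain registers the prefix is omitted entirely, saving a byte per
// instruction.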
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}


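// Code targets are encoded rip-relative: the 32-bit displacement is measured
// from the end of the 4-byte field, hence the "+ 4" / "- 4" below.
// Illustrative arithmetic (values are made up): with pc == 0x1000 and
// Memory::int32_at(pc) == 0x20, target_address_at returns
// 0x1000 + 0x20 + 4 == 0x1024.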
Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}


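// Reads back the code-target index written by emit_code_target above and
// resolves it through the code_targets_ table.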
Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
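// For a code target the stored value is target - (pc + 4); moving the code
// object by delta increases pc but leaves the target in place, so the
// displacement must shrink by delta, which is why apply subtracts below.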
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  }
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    return Assembler::target_address_at(pc_);
  } else {
    return Memory::Address_at(pc_);
  }
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kCallTargetSize;
  } else {
    return Assembler::kExternalTargetSize;
  }
}


void RelocInfo::set_target_address(Address target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
  } else {
    Memory::Address_at(pc_) = target;
    CPU::FlushICache(pc_, sizeof(Address));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
}


Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}


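// The word at pc_ points at the cell's value slot, not at the cell itself;
// stepping back by kValueOffset recovers the JSGlobalPropertyCell
// (set_target_cell below applies the offset in the opposite direction).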
JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}


void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  return false;
#endif
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}


void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitPointer(target_object_address());
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(target_reference_address());
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


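// Static-visitor counterpart of Visit above: the same mode dispatch, but the
// callbacks resolve at compile time through StaticVisitor, and the debugger
// check uses the heap's isolate instead of Isolate::Current().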
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitPointer(heap, target_object_address());
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(target_reference_address());
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

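// The ModR/M byte has the layout mod(2) reg(3) rm(3); set_modrm fills in mod
// and rm, while the reg field is supplied later by the instruction emitter.
// Worked example (illustrative): mod == 1 (disp8 follows) with rm_reg == rbp
// (low bits 5) yields 1 << 6 | 5 == 0x45.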
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}


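// The SIB byte has the layout scale(2) index(3) base(3). Worked example
// (illustrative): times_4 scaling (encoding 2) with index rcx (low bits 1)
// and base rax (low bits 0) yields 2 << 6 | 1 << 3 | 0 == 0x88.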
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}


void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}


void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_