assembler-x64-inl.h revision 592a9fc1d8ea420377a2e7efd0600e20b058be2b
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_

#include "cpu.h"
#include "debug.h"
#include "v8memory.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Implementation of Assembler


void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}
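
// Editorial note: the emit* helpers here assume that unaligned stores
// through Memory::*_at are safe, which holds on x64, and that the host is
// little-endian, so the stored bytes land in the instruction stream in the
// order the x64 encoding requires.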


void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}


void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}


void Assembler::emit_code_target(Handle<Code> target,
                                 RelocInfo::Mode rmode,
                                 unsigned ast_id) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && ast_id != kNoASTId) {
    RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id);
  } else {
    RecordRelocInfo(rmode);
  }
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}
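
// Illustrative example (editorial, not part of the original source): the
// code-target table shrinks a 64-bit Handle<Code> to a 32-bit index in the
// instruction stream. Emitting calls to targets A, A, B in that order
// produces the indices 0, 0, 1: the second A reuses the last table entry,
// while B is appended. A non-consecutive reuse such as A, B, A appends A
// again, yielding 0, 1, 2.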


void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}
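
// Worked example (editorial): REX has the bit layout 0100WRXB. Here W is
// always set (the 0x48 base), reg supplies the R bit and rm_reg the B bit.
// For reg = r9 (code 9, high bit 1) and rm_reg = rax (code 0), this emits
// 0x48 | 1 << 2 | 0 == 0x4C, i.e. REX.WR.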


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}


void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}


void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}


void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}


void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}


void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}


void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}


void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}
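
// Illustrative example (editorial): the emit_optional_rex_32 variants only
// emit a prefix when an extended register (r8-r15, xmm8-xmm15) is involved.
// A 32-bit operation on rax and rbx emits no prefix byte at all, while the
// same operation with r8 as the rm operand emits 0x41 (REX.B).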


Address Assembler::target_address_at(Address pc) {
  return Memory::int32_at(pc) + pc + 4;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
  CPU::FlushICache(pc, sizeof(int32_t));
}
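
// Worked example (editorial): call targets are encoded as rel32
// displacements measured from the end of the 4-byte field. If the int32 at
// pc holds 0x100, the target is pc + 4 + 0x100; conversely, setting the
// target to pc + 4 stores a displacement of 0.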


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // An absolute code pointer inside a code object moves with the code
    // object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  }
}
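
// Editorial note on the signs in apply(): when a code object moves by
// delta, an absolute pointer stored inside it must move with it (hence
// += delta), while a pc-relative displacement to a target that did not
// move must shrink by the same amount (hence -= delta), because it is now
// measured from the relocated pc_.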


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    return Assembler::target_address_at(pc_);
  } else {
    return Memory::Address_at(pc_);
  }
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
                              || rmode_ == EMBEDDED_OBJECT
                              || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kCallTargetSize;
  } else {
    return Assembler::kExternalTargetSize;
  }
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  if (IsCodeTarget(rmode_)) {
    Assembler::set_target_address_at(pc_, target);
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
      host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
          host(), this, HeapObject::cast(target_code));
    }
  } else {
    Memory::Address_at(pc_) = target;
    CPU::FlushICache(pc_, sizeof(Address));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Memory::Object_at(pc_) = target;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}


JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  Object* object = HeapObject::FromAddress(
      address - JSGlobalPropertyCell::kValueOffset);
  return reinterpret_cast<JSGlobalPropertyCell*>(object);
}
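
// Editorial note: the instruction stream stores the address of the cell's
// value field rather than the cell itself, so target_cell() subtracts
// kValueOffset to recover the start of the cell, and set_target_cell()
// below adds it back.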


void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  CPU::FlushICache(pc_, sizeof(Address));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL) {
    // TODO(1550) We are passing NULL as a slot because the cell can never
    // be on an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
}


bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  return false;
#endif
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}


Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}


void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
  CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
                   sizeof(Address));
  if (host() != NULL) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}


void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}
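
// Worked example (editorial): the ModR/M byte has the layout
// mod << 6 | reg << 3 | rm. Only mod and rm are filled in here; the
// assembler ORs the reg field in when the instruction is emitted. With
// mod = 1 (a disp8 follows) and rm_reg = rbx (low bits 011), buf_[0]
// becomes 0x43.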


void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}
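
// Worked example (editorial): the SIB byte has the layout
// scale << 6 | index << 3 | base. For [rbx + rcx*4], scale = times_4 (2),
// index = rcx (001) and base = rbx (011), so buf_[1] becomes 0x8B.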


void Operand::set_disp8(int disp) {
  ASSERT(is_int8(disp));
  ASSERT(len_ == 1 || len_ == 2);
  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int8_t);
}


void Operand::set_disp32(int disp) {
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}
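
// Editorial note: callers are expected to pick set_disp8 when the
// displacement fits in a signed byte and set_disp32 otherwise, e.g. an
// operand like [rbx+12] takes the one-byte form while [rbx+0x1000] needs
// the four-byte form.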


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_