assembler-x64-inl.h revision 756813857a4c2a4d8ad2e805969d5768d3cf43a0
1// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_ASSEMBLER_X64_INL_H_
29#define V8_X64_ASSEMBLER_X64_INL_H_
30
31#include "cpu.h"
32#include "debug.h"
33#include "memory.h"
34
35namespace v8 {
36namespace internal {
37
38
39// -----------------------------------------------------------------------------
40// Implementation of Assembler
41
42
43void Assembler::emitl(uint32_t x) {
44  Memory::uint32_at(pc_) = x;
45  pc_ += sizeof(uint32_t);
46}
47
48
// Writes a 64-bit little-endian value at the current emission position.
// When rmode is not NONE, relocation info carrying x as its data is
// recorded for the current pc_ (i.e. the address of the value just
// written) before pc_ is advanced.
void Assembler::emitq(uint64_t x, RelocInfo::Mode rmode) {
  Memory::uint64_at(pc_) = x;
  if (rmode != RelocInfo::NONE) {
    RecordRelocInfo(rmode, x);
  }
  pc_ += sizeof(uint64_t);
}
56
57
58void Assembler::emitw(uint16_t x) {
59  Memory::uint16_at(pc_) = x;
60  pc_ += sizeof(uint16_t);
61}
62
63
// Emits a 32-bit index into code_targets_ identifying |target| and
// records code-target relocation info at the current position.
// Repeated emits of the same target reuse the last table entry instead
// of growing the table.
void Assembler::emit_code_target(Handle<Code> target, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  RecordRelocInfo(rmode);
  int current = code_targets_.length();
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}
76
77
78void Assembler::emit_rex_64(Register reg, Register rm_reg) {
79  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
80}
81
82
83void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
84  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
85}
86
87
88void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
89  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
90}
91
92
93void Assembler::emit_rex_64(Register reg, const Operand& op) {
94  emit(0x48 | reg.high_bit() << 2 | op.rex_);
95}
96
97
98void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
99  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
100}
101
102
103void Assembler::emit_rex_64(Register rm_reg) {
104  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
105  emit(0x48 | rm_reg.high_bit());
106}
107
108
109void Assembler::emit_rex_64(const Operand& op) {
110  emit(0x48 | op.rex_);
111}
112
113
114void Assembler::emit_rex_32(Register reg, Register rm_reg) {
115  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
116}
117
118
119void Assembler::emit_rex_32(Register reg, const Operand& op) {
120  emit(0x40 | reg.high_bit() << 2  | op.rex_);
121}
122
123
124void Assembler::emit_rex_32(Register rm_reg) {
125  emit(0x40 | rm_reg.high_bit());
126}
127
128
129void Assembler::emit_rex_32(const Operand& op) {
130  emit(0x40 | op.rex_);
131}
132
133
134void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
135  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
136  if (rex_bits != 0) emit(0x40 | rex_bits);
137}
138
139
140void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
141  byte rex_bits =  reg.high_bit() << 2 | op.rex_;
142  if (rex_bits != 0) emit(0x40 | rex_bits);
143}
144
145
146void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
147  byte rex_bits =  (reg.code() & 0x8) >> 1 | op.rex_;
148  if (rex_bits != 0) emit(0x40 | rex_bits);
149}
150
151
152void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
153  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
154  if (rex_bits != 0) emit(0x40 | rex_bits);
155}
156
157
158void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
159  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
160  if (rex_bits != 0) emit(0x40 | rex_bits);
161}
162
163
164void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
165  byte rex_bits =  (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
166  if (rex_bits != 0) emit(0x40 | rex_bits);
167}
168
169
170void Assembler::emit_optional_rex_32(Register rm_reg) {
171  if (rm_reg.high_bit()) emit(0x41);
172}
173
174
175void Assembler::emit_optional_rex_32(const Operand& op) {
176  if (op.rex_ != 0) emit(0x40 | op.rex_);
177}
178
179
180Address Assembler::target_address_at(Address pc) {
181  return Memory::int32_at(pc) + pc + 4;
182}
183
184
185void Assembler::set_target_address_at(Address pc, Address target) {
186  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
187  CPU::FlushICache(pc, sizeof(int32_t));
188}
189
190Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
191  return code_targets_[Memory::int32_at(pc)];
192}
193
194// -----------------------------------------------------------------------------
195// Implementation of RelocInfo
196
197// The modes possibly affected by apply must be in kApplyMask.
// Relocates this entry after the containing code object moved by |delta|
// bytes.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
  } else if (IsCodeTarget(rmode_)) {
    // pc-relative displacement: the site moved by delta but the target
    // did not, so the stored offset shrinks by delta.
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
  }
}
206
207
208Address RelocInfo::target_address() {
209  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
210  if (IsCodeTarget(rmode_)) {
211    return Assembler::target_address_at(pc_);
212  } else {
213    return Memory::Address_at(pc_);
214  }
215}
216
217
218Address RelocInfo::target_address_address() {
219  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
220  return reinterpret_cast<Address>(pc_);
221}
222
223
224int RelocInfo::target_address_size() {
225  if (IsCodedSpecially()) {
226    return Assembler::kCallTargetSize;
227  } else {
228    return Assembler::kExternalTargetSize;
229  }
230}
231
232
233void RelocInfo::set_target_address(Address target) {
234  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
235  if (IsCodeTarget(rmode_)) {
236    Assembler::set_target_address_at(pc_, target);
237  } else {
238    Memory::Address_at(pc_) = target;
239  }
240}
241
242
// Returns the embedded object pointer stored in-line at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}
247
248
249Handle<Object> RelocInfo::target_object_handle(Assembler *origin) {
250  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
251  if (rmode_ == EMBEDDED_OBJECT) {
252    return Memory::Object_Handle_at(pc_);
253  } else {
254    return origin->code_target_object_handle_at(pc_);
255  }
256}
257
258
// Returns the location where the target object pointer is stored, so the
// GC can visit and update it in place.
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}
263
264
// Returns the location of the absolute external reference stored at pc_.
Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}
269
270
// Overwrites the embedded object pointer stored in-line at pc_.
void RelocInfo::set_target_object(Object* target) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  *reinterpret_cast<Object**>(pc_) = target;
}
275
276
bool RelocInfo::IsPatchedReturnSequence() {
  // The recognized call sequence is:
  //  movq(kScratchRegister, immediate64); call(kScratchRegister);
  // It only needs to be distinguished from a return sequence
  //  movq(rsp, rbp); pop(rbp); ret(n); int3 *6
  // The 11th byte is int3 (0xCC) in the return sequence and
  // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
  return pc_[10] != 0xCC;
#else
  // Without debugger support no return sequence is ever patched.
  return false;
#endif
}
290
291
// A debug break slot is patched as soon as it no longer holds the
// original nop filler.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}
295
296
// Returns the call target embedded in a patched return/debug-break
// sequence (the 64-bit immediate patched into the movq).
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}
303
304
// Overwrites the call target embedded in a patched return/debug-break
// sequence.
void RelocInfo::set_call_address(Address target) {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
      target;
}
311
312
313Object* RelocInfo::call_object() {
314  return *call_object_address();
315}
316
317
318void RelocInfo::set_call_object(Object* target) {
319  *call_object_address() = target;
320}
321
322
// Returns the location of the object slot inside a patched call
// sequence. Note this uses kPatchReturnSequenceAddressOffset, not the
// "real" offset used for the call target itself.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}
329
330
// Dispatches this relocation entry to the matching ObjectVisitor hook;
// at most one hook is called, selected by the relocation mode.
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitPointer(target_object_address());
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (Debug::has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    // Patched return/debug-break sites are only visited while
    // breakpoints exist.
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}
351
352
// Static-dispatch variant of Visit: the same mode dispatch as the
// ObjectVisitor overload, but hooks are resolved at compile time on
// StaticVisitor.
template<typename StaticVisitor>
void RelocInfo::Visit() {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitPointer(target_object_address());
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(target_reference_address());
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (Debug::has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    // Patched return/debug-break sites are only visited while
    // breakpoints exist.
    StaticVisitor::VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
374
375
376// -----------------------------------------------------------------------------
377// Implementation of Operand
378
379void Operand::set_modrm(int mod, Register rm_reg) {
380  ASSERT(is_uint2(mod));
381  buf_[0] = mod << 6 | rm_reg.low_bits();
382  // Set REX.B to the high bit of rm.code().
383  rex_ |= rm_reg.high_bit();
384}
385
386
// Appends the SIB byte after the ModR/M byte and accumulates the REX.X
// (index) and REX.B (base) extension bits.
void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
  ASSERT(len_ == 1);
  ASSERT(is_uint2(scale));
  // Use SIB with no index register only for base rsp or r12. Otherwise we
  // would skip the SIB byte entirely.
  ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
  buf_[1] = scale << 6 | index.low_bits() << 3 | base.low_bits();
  rex_ |= index.high_bit() << 1 | base.high_bit();
  len_ = 2;
}
397
398void Operand::set_disp8(int disp) {
399  ASSERT(is_int8(disp));
400  ASSERT(len_ == 1 || len_ == 2);
401  int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
402  *p = disp;
403  len_ += sizeof(int8_t);
404}
405
406void Operand::set_disp32(int disp) {
407  ASSERT(len_ == 1 || len_ == 2);
408  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
409  *p = disp;
410  len_ += sizeof(int32_t);
411}
412
413
414} }  // namespace v8::internal
415
416#endif  // V8_X64_ASSEMBLER_X64_INL_H_
417