1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_X64)
31
32#include "macro-assembler.h"
33#include "serialize.h"
34
35namespace v8 {
36namespace internal {
37
38// -----------------------------------------------------------------------------
39// Implementation of CpuFeatures
40
41
42#ifdef DEBUG
43bool CpuFeatures::initialized_ = false;
44#endif
45uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
46uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
47
48
49void CpuFeatures::Probe() {
50  ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
51#ifdef DEBUG
52  initialized_ = true;
53#endif
54  supported_ = kDefaultCpuFeatures;
55  if (Serializer::enabled()) {
56    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // Only OS-guaranteed features if we might serialize.
58  }
59
60  const int kBufferSize = 4 * KB;
61  VirtualMemory* memory = new VirtualMemory(kBufferSize);
62  if (!memory->IsReserved()) {
63    delete memory;
64    return;
65  }
66  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
67  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
68    delete memory;
69    return;
70  }
71
72  Assembler assm(NULL, memory->address(), kBufferSize);
73  Label cpuid, done;
74#define __ assm.
  // Save the registers and flags that we clobber, and remember the old rsp
  // in rbp, since we are going to modify the stack.
76  __ push(rbp);
77  __ pushfq();
78  __ push(rcx);
79  __ push(rbx);
80  __ movq(rbp, rsp);
81
82  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
83  __ pushfq();
84  __ pop(rax);
85  __ movq(rdx, rax);
86  __ xor_(rax, Immediate(0x200000));  // Flip bit 21.
87  __ push(rax);
88  __ popfq();
89  __ pushfq();
90  __ pop(rax);
91  __ xor_(rax, rdx);  // Different if CPUID is supported.
92  __ j(not_zero, &cpuid);
93
94  // CPUID not supported. Clear the supported features in rax.
95  __ xor_(rax, rax);
96  __ jmp(&done);
97
98  // Invoke CPUID with 1 in eax to get feature information in
99  // ecx:edx. Temporarily enable CPUID support because we know it's
100  // safe here.
101  __ bind(&cpuid);
102  __ movl(rax, Immediate(1));
103  supported_ = kDefaultCpuFeatures | (1 << CPUID);
104  { Scope fscope(CPUID);
105    __ cpuid();
106    // Move the result from ecx:edx to rdi.
107    __ movl(rdi, rdx);  // Zero-extended to 64 bits.
108    __ shl(rcx, Immediate(32));
109    __ or_(rdi, rcx);
110
111    // Get the sahf supported flag, from CPUID(0x80000001)
112    __ movq(rax, 0x80000001, RelocInfo::NONE);
113    __ cpuid();
114  }
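  // Done with the part that needs CPUID temporarily enabled; restore the
  // default set. The real feature set comes from running the stub below.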
115  supported_ = kDefaultCpuFeatures;
116
117  // Put the CPU flags in rax.
118  // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
119  __ movl(rax, Immediate(1));
120  __ and_(rcx, rax);  // Bit 0 is set if SAHF instruction supported.
121  __ not_(rax);
122  __ and_(rax, rdi);
123  __ or_(rax, rcx);
124  __ or_(rax, Immediate(1 << CPUID));
125
126  // Done.
127  __ bind(&done);
128  __ movq(rsp, rbp);
129  __ pop(rbx);
130  __ pop(rcx);
131  __ popfq();
132  __ pop(rbp);
133  __ ret(0);
134#undef __
135
136  typedef uint64_t (*F0)();
137  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
138  supported_ = probe();
139  found_by_runtime_probing_ = supported_;
140  found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
141  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
142  supported_ |= os_guarantees;
143  found_by_runtime_probing_ &= ~os_guarantees;
  // CPUID, SSE2 and CMOV must be available on every x64 CPU.
145  ASSERT(IsSupported(CPUID));
146  ASSERT(IsSupported(SSE2));
147  ASSERT(IsSupported(CMOV));
148
149  delete memory;
150}
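
// A minimal usage sketch (hypothetical call site, not part of this file):
// code generators typically guard optional instructions on the probed bits,
// e.g.
//
//   if (CpuFeatures::IsSupported(SAHF)) {
//     CpuFeatures::Scope use_sahf(SAHF);
//     // ... emit code that uses sahf ...
//   }
//
// where SAHF is the feature bit derived from CPUID(0x80000001) above.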
151
152
153// -----------------------------------------------------------------------------
154// Implementation of RelocInfo
155
156// Patch the code at the current PC with a call to the target address.
157// Additional guard int3 instructions can be added if required.
158void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Loading a register with a 64-bit immediate and calling through that
  // register takes 13 bytes, and each int3 guard takes one byte.
161  static const int kCallCodeSize = 13;
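  // (movq r10, imm64 encodes as REX.W B8+r followed by the 8-byte immediate,
  //  i.e. 10 bytes; call r10 is REX.B FF /2, i.e. 3 bytes.)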
162  int code_size = kCallCodeSize + guard_bytes;
163
164  // Create a code patcher.
165  CodePatcher patcher(pc_, code_size);
166
167  // Add a label for checking the size of the code used for returning.
168#ifdef DEBUG
169  Label check_codesize;
170  patcher.masm()->bind(&check_codesize);
171#endif
172
173  // Patch the code.
174  patcher.masm()->movq(r10, target, RelocInfo::NONE);
175  patcher.masm()->call(r10);
176
177  // Check that the size of the code generated is as expected.
178  ASSERT_EQ(kCallCodeSize,
179            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));
180
181  // Add the requested number of int3 instructions after the call.
182  for (int i = 0; i < guard_bytes; i++) {
183    patcher.masm()->int3();
184  }
185}
186
187
188void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
189  // Patch the code at the current address with the supplied instructions.
190  for (int i = 0; i < instruction_count; i++) {
191    *(pc_ + i) = *(instructions + i);
192  }
193
194  // Indicate that code has changed.
195  CPU::FlushICache(pc_, instruction_count);
196}
197
198
199// -----------------------------------------------------------------------------
200// Register constants.
201
202const int Register::kRegisterCodeByAllocationIndex[kNumAllocatableRegisters] = {
203  // rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
204  0, 3, 2, 1, 7, 8, 9, 11, 14, 15
205};
206
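// Maps a register code back to its allocation index; -1 marks registers that
// are never allocated (rsp, rbp, rsi, r10, r12 and r13).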
207const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
208  0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
209};
210
211
212// -----------------------------------------------------------------------------
213// Implementation of Operand
214
215Operand::Operand(Register base, int32_t disp) : rex_(0) {
216  len_ = 1;
217  if (base.is(rsp) || base.is(r12)) {
218    // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
219    set_sib(times_1, rsp, base);
220  }
221
222  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
223    set_modrm(0, base);
224  } else if (is_int8(disp)) {
225    set_modrm(1, base);
226    set_disp8(disp);
227  } else {
228    set_modrm(2, base);
229    set_disp32(disp);
230  }
231}
232
233
234Operand::Operand(Register base,
235                 Register index,
236                 ScaleFactor scale,
237                 int32_t disp) : rex_(0) {
238  ASSERT(!index.is(rsp));
239  len_ = 1;
240  set_sib(scale, index, base);
241  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
242    // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
243    // possibly set by set_sib.
244    set_modrm(0, rsp);
245  } else if (is_int8(disp)) {
246    set_modrm(1, rsp);
247    set_disp8(disp);
248  } else {
249    set_modrm(2, rsp);
250    set_disp32(disp);
251  }
252}
253
254
255Operand::Operand(Register index,
256                 ScaleFactor scale,
257                 int32_t disp) : rex_(0) {
258  ASSERT(!index.is(rsp));
259  len_ = 1;
260  set_modrm(0, rsp);
261  set_sib(scale, index, rbp);
262  set_disp32(disp);
263}
264
265
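// Re-encodes an existing memory operand with 'offset' added to its
// displacement, preserving the REX bits and any SIB byte and shrinking or
// widening the displacement field as needed.  For example, adding 8 to an
// operand encoding [rbx+4] yields an operand encoding [rbx+12], still with
// an 8-bit displacement.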
266Operand::Operand(const Operand& operand, int32_t offset) {
267  ASSERT(operand.len_ >= 1);
268  // Operand encodes REX ModR/M [SIB] [Disp].
269  byte modrm = operand.buf_[0];
270  ASSERT(modrm < 0xC0);  // Disallow mode 3 (register target).
271  bool has_sib = ((modrm & 0x07) == 0x04);
272  byte mode = modrm & 0xC0;
273  int disp_offset = has_sib ? 2 : 1;
274  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
275  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
276  // displacement.
277  bool is_baseless = (mode == 0) && (base_reg == 0x05);  // No base or RIP base.
278  int32_t disp_value = 0;
279  if (mode == 0x80 || is_baseless) {
280    // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
281    disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
282  } else if (mode == 0x40) {
283    // Mode 1: Byte displacement.
284    disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
285  }
286
287  // Write new operand with same registers, but with modified displacement.
288  ASSERT(offset >= 0 ? disp_value + offset > disp_value
289                     : disp_value + offset < disp_value);  // No overflow.
290  disp_value += offset;
291  rex_ = operand.rex_;
292  if (!is_int8(disp_value) || is_baseless) {
    // Need 32 bits of displacement: mode 2, or mode 0 for a baseless operand
    // (rbp/r13 encoding with mod 0, i.e. no base or RIP-relative).
294    buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
295    len_ = disp_offset + 4;
296    Memory::int32_at(&buf_[disp_offset]) = disp_value;
297  } else if (disp_value != 0 || (base_reg == 0x05)) {
298    // Need 8 bits of displacement.
299    buf_[0] = (modrm & 0x3f) | 0x40;  // Mode 1.
300    len_ = disp_offset + 1;
301    buf_[disp_offset] = static_cast<byte>(disp_value);
302  } else {
303    // Need no displacement.
304    buf_[0] = (modrm & 0x3f);  // Mode 0.
305    len_ = disp_offset;
306  }
307  if (has_sib) {
308    buf_[1] = operand.buf_[1];
309  }
310}
311
312
313bool Operand::AddressUsesRegister(Register reg) const {
314  int code = reg.code();
315  ASSERT((buf_[0] & 0xC0) != 0xC0);  // Always a memory operand.
316  // Start with only low three bits of base register. Initial decoding doesn't
317  // distinguish on the REX.B bit.
318  int base_code = buf_[0] & 0x07;
319  if (base_code == rsp.code()) {
320    // SIB byte present in buf_[1].
321    // Check the index register from the SIB byte + REX.X prefix.
322    int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
323    // Index code (including REX.X) of 0x04 (rsp) means no index register.
324    if (index_code != rsp.code() && index_code == code) return true;
325    // Add REX.B to get the full base register code.
326    base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
327    // A base register of 0x05 (rbp) with mod = 0 means no base register.
328    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
329    return code == base_code;
330  } else {
331    // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
332    // no base register.
333    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
334    base_code |= ((rex_ & 0x01) << 3);
335    return code == base_code;
336  }
337}
338
339
340// -----------------------------------------------------------------------------
341// Implementation of Assembler.
342
343#ifdef GENERATED_CODE_COVERAGE
344static void InitCoverageLog();
345#endif
346
347Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
348    : AssemblerBase(arg_isolate),
349      code_targets_(100),
350      positions_recorder_(this),
351      emit_debug_code_(FLAG_debug_code) {
352  if (buffer == NULL) {
353    // Do our own buffer management.
354    if (buffer_size <= kMinimalBufferSize) {
355      buffer_size = kMinimalBufferSize;
356
357      if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) {
358        buffer = isolate()->assembler_spare_buffer();
359        isolate()->set_assembler_spare_buffer(NULL);
360      }
361    }
362    if (buffer == NULL) {
363      buffer_ = NewArray<byte>(buffer_size);
364    } else {
365      buffer_ = static_cast<byte*>(buffer);
366    }
367    buffer_size_ = buffer_size;
368    own_buffer_ = true;
369  } else {
370    // Use externally provided buffer instead.
371    ASSERT(buffer_size > 0);
372    buffer_ = static_cast<byte*>(buffer);
373    buffer_size_ = buffer_size;
374    own_buffer_ = false;
375  }
376
  // Clear the buffer in debug mode unless it was provided by the caller, in
  // which case we can't be sure it's okay to overwrite existing code in it.
380#ifdef DEBUG
381  if (own_buffer_) {
382    memset(buffer_, 0xCC, buffer_size);  // int3
383  }
384#endif
385
386  // Set up buffer pointers.
387  ASSERT(buffer_ != NULL);
388  pc_ = buffer_;
389  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
390
391
392#ifdef GENERATED_CODE_COVERAGE
393  InitCoverageLog();
394#endif
395}
396
397
398Assembler::~Assembler() {
399  if (own_buffer_) {
400    if (isolate() != NULL &&
401        isolate()->assembler_spare_buffer() == NULL &&
402        buffer_size_ == kMinimalBufferSize) {
403      isolate()->set_assembler_spare_buffer(buffer_);
404    } else {
405      DeleteArray(buffer_);
406    }
407  }
408}
409
410
411void Assembler::GetCode(CodeDesc* desc) {
412  // Finalize code (at this point overflow() may be true, but the gap ensures
413  // that we are still not overlapping instructions and relocation info).
414  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
415  // Set up code descriptor.
416  desc->buffer = buffer_;
417  desc->buffer_size = buffer_size_;
418  desc->instr_size = pc_offset();
419  ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
420  desc->reloc_size =
421      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
422  desc->origin = this;
423}
424
425
426void Assembler::Align(int m) {
427  ASSERT(IsPowerOf2(m));
428  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
429  Nop(delta);
430}
431
432
433void Assembler::CodeTargetAlign() {
434  Align(16);  // Preferred alignment of jump targets on x64.
435}
436
437
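// Returns true if the instruction at addr is a NOP: either 0x90 (possibly
// preceded by 0x66 prefixes) or the multi-byte 0F 1F form emitted by Nop().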
438bool Assembler::IsNop(Address addr) {
439  Address a = addr;
440  while (*a == 0x66) a++;
441  if (*a == 0x90) return true;
442  if (a[0] == 0xf && a[1] == 0x1f) return true;
443  return false;
444}
445
446
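// Binds the label to the given position.  Unresolved 32-bit jump/call sites
// referring to the label form a chain through the instruction stream: each
// linked slot holds the offset of the previously linked site, and the last
// slot refers to itself.  Walk the chain and patch every site with the real
// pc-relative displacement; near (8-bit) links are patched analogously below.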
447void Assembler::bind_to(Label* L, int pos) {
448  ASSERT(!L->is_bound());  // Label may only be bound once.
449  ASSERT(0 <= pos && pos <= pc_offset());  // Position must be valid.
450  if (L->is_linked()) {
451    int current = L->pos();
452    int next = long_at(current);
453    while (next != current) {
454      // Relative address, relative to point after address.
455      int imm32 = pos - (current + sizeof(int32_t));
456      long_at_put(current, imm32);
457      current = next;
458      next = long_at(next);
459    }
460    // Fix up last fixup on linked list.
461    int last_imm32 = pos - (current + sizeof(int32_t));
462    long_at_put(current, last_imm32);
463  }
464  while (L->is_near_linked()) {
465    int fixup_pos = L->near_link_pos();
466    int offset_to_next =
467        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
468    ASSERT(offset_to_next <= 0);
469    int disp = pos - (fixup_pos + sizeof(int8_t));
470    ASSERT(is_int8(disp));
471    set_byte_at(fixup_pos, disp);
472    if (offset_to_next < 0) {
473      L->link_to(fixup_pos + offset_to_next, Label::kNear);
474    } else {
475      L->UnuseNear();
476    }
477  }
478  L->bind_to(pos);
479}
480
481
482void Assembler::bind(Label* L) {
483  bind_to(L, pc_offset());
484}
485
486
487void Assembler::GrowBuffer() {
488  ASSERT(buffer_overflow());
489  if (!own_buffer_) FATAL("external code buffer is too small");
490
491  // Compute new buffer size.
492  CodeDesc desc;  // the new buffer
493  if (buffer_size_ < 4*KB) {
494    desc.buffer_size = 4*KB;
495  } else {
496    desc.buffer_size = 2*buffer_size_;
497  }
  // Some internal data structures overflow for very large buffers, so
  // kMaximalBufferSize must be kept small enough to avoid this.
500  if ((desc.buffer_size > kMaximalBufferSize) ||
501      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
502    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
503  }
504
505  // Set up new buffer.
506  desc.buffer = NewArray<byte>(desc.buffer_size);
507  desc.instr_size = pc_offset();
508  desc.reloc_size =
509      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
510
  // Clear the buffer in debug mode. Use 'int3' instructions so that we fail
  // fast if we ever start executing uninitialized code.
513#ifdef DEBUG
514  memset(desc.buffer, 0xCC, desc.buffer_size);
515#endif
516
517  // Copy the data.
518  intptr_t pc_delta = desc.buffer - buffer_;
519  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
520      (buffer_ + buffer_size_);
521  memmove(desc.buffer, buffer_, desc.instr_size);
522  memmove(rc_delta + reloc_info_writer.pos(),
523          reloc_info_writer.pos(), desc.reloc_size);
524
525  // Switch buffers.
526  if (isolate() != NULL &&
527      isolate()->assembler_spare_buffer() == NULL &&
528      buffer_size_ == kMinimalBufferSize) {
529    isolate()->set_assembler_spare_buffer(buffer_);
530  } else {
531    DeleteArray(buffer_);
532  }
533  buffer_ = desc.buffer;
534  buffer_size_ = desc.buffer_size;
535  pc_ += pc_delta;
536  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
537                               reloc_info_writer.last_pc() + pc_delta);
538
539  // Relocate runtime entries.
540  for (RelocIterator it(desc); !it.done(); it.next()) {
541    RelocInfo::Mode rmode = it.rinfo()->rmode();
542    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
543      intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
544      if (*p != 0) {  // 0 means uninitialized.
545        *p += pc_delta;
546      }
547    }
548  }
549
550  ASSERT(!buffer_overflow());
551}
552
553
554void Assembler::emit_operand(int code, const Operand& adr) {
555  ASSERT(is_uint3(code));
556  const unsigned length = adr.len_;
557  ASSERT(length > 0);
558
559  // Emit updated ModR/M byte containing the given register.
560  ASSERT((adr.buf_[0] & 0x38) == 0);
561  pc_[0] = adr.buf_[0] | code << 3;
562
563  // Emit the rest of the encoded operand.
564  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
565  pc_ += length;
566}
567
568
569// Assembler Instruction implementations.
570
571void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
572  EnsureSpace ensure_space(this);
573  emit_rex_64(reg, op);
574  emit(opcode);
575  emit_operand(reg, op);
576}
577
578
579void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
580  EnsureSpace ensure_space(this);
581  ASSERT((opcode & 0xC6) == 2);
582  if (rm_reg.low_bits() == 4)  {  // Forces SIB byte.
583    // Swap reg and rm_reg and change opcode operand order.
584    emit_rex_64(rm_reg, reg);
585    emit(opcode ^ 0x02);
586    emit_modrm(rm_reg, reg);
587  } else {
588    emit_rex_64(reg, rm_reg);
589    emit(opcode);
590    emit_modrm(reg, rm_reg);
591  }
592}
593
594
595void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
596  EnsureSpace ensure_space(this);
597  ASSERT((opcode & 0xC6) == 2);
598  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
599    // Swap reg and rm_reg and change opcode operand order.
600    emit(0x66);
601    emit_optional_rex_32(rm_reg, reg);
602    emit(opcode ^ 0x02);
603    emit_modrm(rm_reg, reg);
604  } else {
605    emit(0x66);
606    emit_optional_rex_32(reg, rm_reg);
607    emit(opcode);
608    emit_modrm(reg, rm_reg);
609  }
610}
611
612
613void Assembler::arithmetic_op_16(byte opcode,
614                                 Register reg,
615                                 const Operand& rm_reg) {
616  EnsureSpace ensure_space(this);
617  emit(0x66);
618  emit_optional_rex_32(reg, rm_reg);
619  emit(opcode);
620  emit_operand(reg, rm_reg);
621}
622
623
624void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
625  EnsureSpace ensure_space(this);
626  ASSERT((opcode & 0xC6) == 2);
627  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
628    // Swap reg and rm_reg and change opcode operand order.
629    emit_optional_rex_32(rm_reg, reg);
630    emit(opcode ^ 0x02);  // E.g. 0x03 -> 0x01 for ADD.
631    emit_modrm(rm_reg, reg);
632  } else {
633    emit_optional_rex_32(reg, rm_reg);
634    emit(opcode);
635    emit_modrm(reg, rm_reg);
636  }
637}
638
639
640void Assembler::arithmetic_op_32(byte opcode,
641                                 Register reg,
642                                 const Operand& rm_reg) {
643  EnsureSpace ensure_space(this);
644  emit_optional_rex_32(reg, rm_reg);
645  emit(opcode);
646  emit_operand(reg, rm_reg);
647}
648
649
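// 'subcode' is the ModR/M reg-field extension selecting the group-1 operation
// (0 = ADD, 1 = OR, 2 = ADC, 3 = SBB, 4 = AND, 5 = SUB, 6 = XOR, 7 = CMP).
// Opcode 0x83 takes a sign-extended 8-bit immediate, 0x81 a 32-bit immediate,
// and 0x05 | (subcode << 3) is the short rax/eax form.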
650void Assembler::immediate_arithmetic_op(byte subcode,
651                                        Register dst,
652                                        Immediate src) {
653  EnsureSpace ensure_space(this);
654  emit_rex_64(dst);
655  if (is_int8(src.value_)) {
656    emit(0x83);
657    emit_modrm(subcode, dst);
658    emit(src.value_);
659  } else if (dst.is(rax)) {
660    emit(0x05 | (subcode << 3));
661    emitl(src.value_);
662  } else {
663    emit(0x81);
664    emit_modrm(subcode, dst);
665    emitl(src.value_);
666  }
667}
668
669void Assembler::immediate_arithmetic_op(byte subcode,
670                                        const Operand& dst,
671                                        Immediate src) {
672  EnsureSpace ensure_space(this);
673  emit_rex_64(dst);
674  if (is_int8(src.value_)) {
675    emit(0x83);
676    emit_operand(subcode, dst);
677    emit(src.value_);
678  } else {
679    emit(0x81);
680    emit_operand(subcode, dst);
681    emitl(src.value_);
682  }
683}
684
685
686void Assembler::immediate_arithmetic_op_16(byte subcode,
687                                           Register dst,
688                                           Immediate src) {
689  EnsureSpace ensure_space(this);
690  emit(0x66);  // Operand size override prefix.
691  emit_optional_rex_32(dst);
692  if (is_int8(src.value_)) {
693    emit(0x83);
694    emit_modrm(subcode, dst);
695    emit(src.value_);
696  } else if (dst.is(rax)) {
697    emit(0x05 | (subcode << 3));
698    emitw(src.value_);
699  } else {
700    emit(0x81);
701    emit_modrm(subcode, dst);
702    emitw(src.value_);
703  }
704}
705
706
707void Assembler::immediate_arithmetic_op_16(byte subcode,
708                                           const Operand& dst,
709                                           Immediate src) {
710  EnsureSpace ensure_space(this);
711  emit(0x66);  // Operand size override prefix.
712  emit_optional_rex_32(dst);
713  if (is_int8(src.value_)) {
714    emit(0x83);
715    emit_operand(subcode, dst);
716    emit(src.value_);
717  } else {
718    emit(0x81);
719    emit_operand(subcode, dst);
720    emitw(src.value_);
721  }
722}
723
724
725void Assembler::immediate_arithmetic_op_32(byte subcode,
726                                           Register dst,
727                                           Immediate src) {
728  EnsureSpace ensure_space(this);
729  emit_optional_rex_32(dst);
730  if (is_int8(src.value_)) {
731    emit(0x83);
732    emit_modrm(subcode, dst);
733    emit(src.value_);
734  } else if (dst.is(rax)) {
735    emit(0x05 | (subcode << 3));
736    emitl(src.value_);
737  } else {
738    emit(0x81);
739    emit_modrm(subcode, dst);
740    emitl(src.value_);
741  }
742}
743
744
745void Assembler::immediate_arithmetic_op_32(byte subcode,
746                                           const Operand& dst,
747                                           Immediate src) {
748  EnsureSpace ensure_space(this);
749  emit_optional_rex_32(dst);
750  if (is_int8(src.value_)) {
751    emit(0x83);
752    emit_operand(subcode, dst);
753    emit(src.value_);
754  } else {
755    emit(0x81);
756    emit_operand(subcode, dst);
757    emitl(src.value_);
758  }
759}
760
761
762void Assembler::immediate_arithmetic_op_8(byte subcode,
763                                          const Operand& dst,
764                                          Immediate src) {
765  EnsureSpace ensure_space(this);
766  emit_optional_rex_32(dst);
767  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
768  emit(0x80);
769  emit_operand(subcode, dst);
770  emit(src.value_);
771}
772
773
774void Assembler::immediate_arithmetic_op_8(byte subcode,
775                                          Register dst,
776                                          Immediate src) {
777  EnsureSpace ensure_space(this);
778  if (!dst.is_byte_register()) {
779    // Use 64-bit mode byte registers.
780    emit_rex_64(dst);
781  }
782  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
783  emit(0x80);
784  emit_modrm(subcode, dst);
785  emit(src.value_);
786}
787
788
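// Shift-group instructions: 'subcode' is the /r extension (4 = shl, 5 = shr,
// 7 = sar).  Opcode 0xD1 shifts by one, 0xC1 by an 8-bit immediate, and 0xD3
// (used below) shifts by cl.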
789void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
790  EnsureSpace ensure_space(this);
791  ASSERT(is_uint6(shift_amount.value_));  // illegal shift count
792  if (shift_amount.value_ == 1) {
793    emit_rex_64(dst);
794    emit(0xD1);
795    emit_modrm(subcode, dst);
796  } else {
797    emit_rex_64(dst);
798    emit(0xC1);
799    emit_modrm(subcode, dst);
800    emit(shift_amount.value_);
801  }
802}
803
804
805void Assembler::shift(Register dst, int subcode) {
806  EnsureSpace ensure_space(this);
807  emit_rex_64(dst);
808  emit(0xD3);
809  emit_modrm(subcode, dst);
810}
811
812
813void Assembler::shift_32(Register dst, int subcode) {
814  EnsureSpace ensure_space(this);
815  emit_optional_rex_32(dst);
816  emit(0xD3);
817  emit_modrm(subcode, dst);
818}
819
820
821void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
822  EnsureSpace ensure_space(this);
823  ASSERT(is_uint5(shift_amount.value_));  // illegal shift count
824  if (shift_amount.value_ == 1) {
825    emit_optional_rex_32(dst);
826    emit(0xD1);
827    emit_modrm(subcode, dst);
828  } else {
829    emit_optional_rex_32(dst);
830    emit(0xC1);
831    emit_modrm(subcode, dst);
832    emit(shift_amount.value_);
833  }
834}
835
836
837void Assembler::bt(const Operand& dst, Register src) {
838  EnsureSpace ensure_space(this);
839  emit_rex_64(src, dst);
840  emit(0x0F);
841  emit(0xA3);
842  emit_operand(src, dst);
843}
844
845
846void Assembler::bts(const Operand& dst, Register src) {
847  EnsureSpace ensure_space(this);
848  emit_rex_64(src, dst);
849  emit(0x0F);
850  emit(0xAB);
851  emit_operand(src, dst);
852}
853
854
855void Assembler::call(Label* L) {
856  positions_recorder()->WriteRecordedPositions();
857  EnsureSpace ensure_space(this);
858  // 1110 1000 #32-bit disp.
859  emit(0xE8);
860  if (L->is_bound()) {
861    int offset = L->pos() - pc_offset() - sizeof(int32_t);
862    ASSERT(offset <= 0);
863    emitl(offset);
864  } else if (L->is_linked()) {
865    emitl(L->pos());
866    L->link_to(pc_offset() - sizeof(int32_t));
867  } else {
868    ASSERT(L->is_unused());
869    int32_t current = pc_offset();
870    emitl(current);
871    L->link_to(current);
872  }
873}
874
875
876void Assembler::call(Handle<Code> target,
877                     RelocInfo::Mode rmode,
878                     unsigned ast_id) {
879  positions_recorder()->WriteRecordedPositions();
880  EnsureSpace ensure_space(this);
881  // 1110 1000 #32-bit disp.
882  emit(0xE8);
883  emit_code_target(target, rmode, ast_id);
884}
885
886
887void Assembler::call(Register adr) {
888  positions_recorder()->WriteRecordedPositions();
889  EnsureSpace ensure_space(this);
890  // Opcode: FF /2 r64.
891  emit_optional_rex_32(adr);
892  emit(0xFF);
893  emit_modrm(0x2, adr);
894}
895
896
897void Assembler::call(const Operand& op) {
898  positions_recorder()->WriteRecordedPositions();
899  EnsureSpace ensure_space(this);
900  // Opcode: FF /2 m64.
901  emit_optional_rex_32(op);
902  emit(0xFF);
903  emit_operand(0x2, op);
904}
905
906
907// Calls directly to the given address using a relative offset.
908// Should only ever be used in Code objects for calls within the
909// same Code object. Should not be used when generating new code (use labels),
910// but only when patching existing code.
911void Assembler::call(Address target) {
912  positions_recorder()->WriteRecordedPositions();
913  EnsureSpace ensure_space(this);
914  // 1110 1000 #32-bit disp.
915  emit(0xE8);
916  Address source = pc_ + 4;
917  intptr_t displacement = target - source;
918  ASSERT(is_int32(displacement));
919  emitl(static_cast<int32_t>(displacement));
920}
921
922
923void Assembler::clc() {
924  EnsureSpace ensure_space(this);
925  emit(0xF8);
926}
927
928void Assembler::cld() {
929  EnsureSpace ensure_space(this);
930  emit(0xFC);
931}
932
933void Assembler::cdq() {
934  EnsureSpace ensure_space(this);
935  emit(0x99);
936}
937
938
939void Assembler::cmovq(Condition cc, Register dst, Register src) {
940  if (cc == always) {
941    movq(dst, src);
942  } else if (cc == never) {
943    return;
944  }
  // No need to check CpuFeatures for CMOV support; it is a required part of
  // the 64-bit architecture.
947  ASSERT(cc >= 0);  // Use mov for unconditional moves.
948  EnsureSpace ensure_space(this);
949  // Opcode: REX.W 0f 40 + cc /r.
950  emit_rex_64(dst, src);
951  emit(0x0f);
952  emit(0x40 + cc);
953  emit_modrm(dst, src);
954}
955
956
957void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
958  if (cc == always) {
959    movq(dst, src);
960  } else if (cc == never) {
961    return;
962  }
963  ASSERT(cc >= 0);
964  EnsureSpace ensure_space(this);
965  // Opcode: REX.W 0f 40 + cc /r.
966  emit_rex_64(dst, src);
967  emit(0x0f);
968  emit(0x40 + cc);
969  emit_operand(dst, src);
970}
971
972
973void Assembler::cmovl(Condition cc, Register dst, Register src) {
974  if (cc == always) {
975    movl(dst, src);
976  } else if (cc == never) {
977    return;
978  }
979  ASSERT(cc >= 0);
980  EnsureSpace ensure_space(this);
981  // Opcode: 0f 40 + cc /r.
982  emit_optional_rex_32(dst, src);
983  emit(0x0f);
984  emit(0x40 + cc);
985  emit_modrm(dst, src);
986}
987
988
989void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
990  if (cc == always) {
991    movl(dst, src);
992  } else if (cc == never) {
993    return;
994  }
995  ASSERT(cc >= 0);
996  EnsureSpace ensure_space(this);
997  // Opcode: 0f 40 + cc /r.
998  emit_optional_rex_32(dst, src);
999  emit(0x0f);
1000  emit(0x40 + cc);
1001  emit_operand(dst, src);
1002}
1003
1004
1005void Assembler::cmpb_al(Immediate imm8) {
1006  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
1007  EnsureSpace ensure_space(this);
1008  emit(0x3c);
1009  emit(imm8.value_);
1010}
1011
1012
1013void Assembler::cpuid() {
1014  ASSERT(CpuFeatures::IsEnabled(CPUID));
1015  EnsureSpace ensure_space(this);
1016  emit(0x0F);
1017  emit(0xA2);
1018}
1019
1020
1021void Assembler::cqo() {
1022  EnsureSpace ensure_space(this);
1023  emit_rex_64();
1024  emit(0x99);
1025}
1026
1027
1028void Assembler::decq(Register dst) {
1029  EnsureSpace ensure_space(this);
1030  emit_rex_64(dst);
1031  emit(0xFF);
1032  emit_modrm(0x1, dst);
1033}
1034
1035
1036void Assembler::decq(const Operand& dst) {
1037  EnsureSpace ensure_space(this);
1038  emit_rex_64(dst);
1039  emit(0xFF);
1040  emit_operand(1, dst);
1041}
1042
1043
1044void Assembler::decl(Register dst) {
1045  EnsureSpace ensure_space(this);
1046  emit_optional_rex_32(dst);
1047  emit(0xFF);
1048  emit_modrm(0x1, dst);
1049}
1050
1051
1052void Assembler::decl(const Operand& dst) {
1053  EnsureSpace ensure_space(this);
1054  emit_optional_rex_32(dst);
1055  emit(0xFF);
1056  emit_operand(1, dst);
1057}
1058
1059
1060void Assembler::decb(Register dst) {
1061  EnsureSpace ensure_space(this);
1062  if (!dst.is_byte_register()) {
1063    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
1064    emit_rex_32(dst);
1065  }
1066  emit(0xFE);
1067  emit_modrm(0x1, dst);
1068}
1069
1070
1071void Assembler::decb(const Operand& dst) {
1072  EnsureSpace ensure_space(this);
1073  emit_optional_rex_32(dst);
1074  emit(0xFE);
1075  emit_operand(1, dst);
1076}
1077
1078
1079void Assembler::enter(Immediate size) {
1080  EnsureSpace ensure_space(this);
1081  emit(0xC8);
1082  emitw(size.value_);  // 16 bit operand, always.
1083  emit(0);
1084}
1085
1086
1087void Assembler::hlt() {
1088  EnsureSpace ensure_space(this);
1089  emit(0xF4);
1090}
1091
1092
1093void Assembler::idivq(Register src) {
1094  EnsureSpace ensure_space(this);
1095  emit_rex_64(src);
1096  emit(0xF7);
1097  emit_modrm(0x7, src);
1098}
1099
1100
1101void Assembler::idivl(Register src) {
1102  EnsureSpace ensure_space(this);
1103  emit_optional_rex_32(src);
1104  emit(0xF7);
1105  emit_modrm(0x7, src);
1106}
1107
1108
1109void Assembler::imul(Register src) {
1110  EnsureSpace ensure_space(this);
1111  emit_rex_64(src);
1112  emit(0xF7);
1113  emit_modrm(0x5, src);
1114}
1115
1116
1117void Assembler::imul(Register dst, Register src) {
1118  EnsureSpace ensure_space(this);
1119  emit_rex_64(dst, src);
1120  emit(0x0F);
1121  emit(0xAF);
1122  emit_modrm(dst, src);
1123}
1124
1125
1126void Assembler::imul(Register dst, const Operand& src) {
1127  EnsureSpace ensure_space(this);
1128  emit_rex_64(dst, src);
1129  emit(0x0F);
1130  emit(0xAF);
1131  emit_operand(dst, src);
1132}
1133
1134
1135void Assembler::imul(Register dst, Register src, Immediate imm) {
1136  EnsureSpace ensure_space(this);
1137  emit_rex_64(dst, src);
1138  if (is_int8(imm.value_)) {
1139    emit(0x6B);
1140    emit_modrm(dst, src);
1141    emit(imm.value_);
1142  } else {
1143    emit(0x69);
1144    emit_modrm(dst, src);
1145    emitl(imm.value_);
1146  }
1147}
1148
1149
1150void Assembler::imull(Register dst, Register src) {
1151  EnsureSpace ensure_space(this);
1152  emit_optional_rex_32(dst, src);
1153  emit(0x0F);
1154  emit(0xAF);
1155  emit_modrm(dst, src);
1156}
1157
1158
1159void Assembler::imull(Register dst, const Operand& src) {
1160  EnsureSpace ensure_space(this);
1161  emit_optional_rex_32(dst, src);
1162  emit(0x0F);
1163  emit(0xAF);
1164  emit_operand(dst, src);
1165}
1166
1167
1168void Assembler::imull(Register dst, Register src, Immediate imm) {
1169  EnsureSpace ensure_space(this);
1170  emit_optional_rex_32(dst, src);
1171  if (is_int8(imm.value_)) {
1172    emit(0x6B);
1173    emit_modrm(dst, src);
1174    emit(imm.value_);
1175  } else {
1176    emit(0x69);
1177    emit_modrm(dst, src);
1178    emitl(imm.value_);
1179  }
1180}
1181
1182
1183void Assembler::incq(Register dst) {
1184  EnsureSpace ensure_space(this);
1185  emit_rex_64(dst);
1186  emit(0xFF);
1187  emit_modrm(0x0, dst);
1188}
1189
1190
1191void Assembler::incq(const Operand& dst) {
1192  EnsureSpace ensure_space(this);
1193  emit_rex_64(dst);
1194  emit(0xFF);
1195  emit_operand(0, dst);
1196}
1197
1198
1199void Assembler::incl(const Operand& dst) {
1200  EnsureSpace ensure_space(this);
1201  emit_optional_rex_32(dst);
1202  emit(0xFF);
1203  emit_operand(0, dst);
1204}
1205
1206
1207void Assembler::incl(Register dst) {
1208  EnsureSpace ensure_space(this);
1209  emit_optional_rex_32(dst);
1210  emit(0xFF);
1211  emit_modrm(0, dst);
1212}
1213
1214
1215void Assembler::int3() {
1216  EnsureSpace ensure_space(this);
1217  emit(0xCC);
1218}
1219
1220
1221void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
1222  if (cc == always) {
1223    jmp(L);
1224    return;
1225  } else if (cc == never) {
1226    return;
1227  }
1228  EnsureSpace ensure_space(this);
1229  ASSERT(is_uint4(cc));
1230  if (L->is_bound()) {
1231    const int short_size = 2;
1232    const int long_size  = 6;
1233    int offs = L->pos() - pc_offset();
1234    ASSERT(offs <= 0);
1235    if (is_int8(offs - short_size)) {
1236      // 0111 tttn #8-bit disp.
1237      emit(0x70 | cc);
1238      emit((offs - short_size) & 0xFF);
1239    } else {
1240      // 0000 1111 1000 tttn #32-bit disp.
1241      emit(0x0F);
1242      emit(0x80 | cc);
1243      emitl(offs - long_size);
1244    }
1245  } else if (distance == Label::kNear) {
1246    // 0111 tttn #8-bit disp
1247    emit(0x70 | cc);
1248    byte disp = 0x00;
1249    if (L->is_near_linked()) {
1250      int offset = L->near_link_pos() - pc_offset();
1251      ASSERT(is_int8(offset));
1252      disp = static_cast<byte>(offset & 0xFF);
1253    }
1254    L->link_to(pc_offset(), Label::kNear);
1255    emit(disp);
1256  } else if (L->is_linked()) {
1257    // 0000 1111 1000 tttn #32-bit disp.
1258    emit(0x0F);
1259    emit(0x80 | cc);
1260    emitl(L->pos());
1261    L->link_to(pc_offset() - sizeof(int32_t));
1262  } else {
1263    ASSERT(L->is_unused());
1264    emit(0x0F);
1265    emit(0x80 | cc);
1266    int32_t current = pc_offset();
1267    emitl(current);
1268    L->link_to(current);
1269  }
1270}
1271
1272
1273void Assembler::j(Condition cc,
1274                  Handle<Code> target,
1275                  RelocInfo::Mode rmode) {
1276  EnsureSpace ensure_space(this);
1277  ASSERT(is_uint4(cc));
1278  // 0000 1111 1000 tttn #32-bit disp.
1279  emit(0x0F);
1280  emit(0x80 | cc);
1281  emit_code_target(target, rmode);
1282}
1283
1284
1285void Assembler::jmp(Label* L, Label::Distance distance) {
1286  EnsureSpace ensure_space(this);
1287  const int short_size = sizeof(int8_t);
1288  const int long_size = sizeof(int32_t);
1289  if (L->is_bound()) {
1290    int offs = L->pos() - pc_offset() - 1;
1291    ASSERT(offs <= 0);
1292    if (is_int8(offs - short_size)) {
1293      // 1110 1011 #8-bit disp.
1294      emit(0xEB);
1295      emit((offs - short_size) & 0xFF);
1296    } else {
1297      // 1110 1001 #32-bit disp.
1298      emit(0xE9);
1299      emitl(offs - long_size);
1300    }
1301  } else if (distance == Label::kNear) {
1302    emit(0xEB);
1303    byte disp = 0x00;
1304    if (L->is_near_linked()) {
1305      int offset = L->near_link_pos() - pc_offset();
1306      ASSERT(is_int8(offset));
1307      disp = static_cast<byte>(offset & 0xFF);
1308    }
1309    L->link_to(pc_offset(), Label::kNear);
1310    emit(disp);
1311  } else if (L->is_linked()) {
1312    // 1110 1001 #32-bit disp.
1313    emit(0xE9);
1314    emitl(L->pos());
1315    L->link_to(pc_offset() - long_size);
1316  } else {
1317    // 1110 1001 #32-bit disp.
1318    ASSERT(L->is_unused());
1319    emit(0xE9);
1320    int32_t current = pc_offset();
1321    emitl(current);
1322    L->link_to(current);
1323  }
1324}
1325
1326
1327void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
1328  EnsureSpace ensure_space(this);
1329  // 1110 1001 #32-bit disp.
1330  emit(0xE9);
1331  emit_code_target(target, rmode);
1332}
1333
1334
1335void Assembler::jmp(Register target) {
1336  EnsureSpace ensure_space(this);
1337  // Opcode FF/4 r64.
1338  emit_optional_rex_32(target);
1339  emit(0xFF);
1340  emit_modrm(0x4, target);
1341}
1342
1343
1344void Assembler::jmp(const Operand& src) {
1345  EnsureSpace ensure_space(this);
1346  // Opcode FF/4 m64.
1347  emit_optional_rex_32(src);
1348  emit(0xFF);
1349  emit_operand(0x4, src);
1350}
1351
1352
1353void Assembler::lea(Register dst, const Operand& src) {
1354  EnsureSpace ensure_space(this);
1355  emit_rex_64(dst, src);
1356  emit(0x8D);
1357  emit_operand(dst, src);
1358}
1359
1360
1361void Assembler::leal(Register dst, const Operand& src) {
1362  EnsureSpace ensure_space(this);
1363  emit_optional_rex_32(dst, src);
1364  emit(0x8D);
1365  emit_operand(dst, src);
1366}
1367
1368
1369void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
1370  EnsureSpace ensure_space(this);
1371  emit(0x48);  // REX.W
1372  emit(0xA1);
1373  emitq(reinterpret_cast<uintptr_t>(value), mode);
1374}
1375
1376
1377void Assembler::load_rax(ExternalReference ref) {
1378  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1379}
1380
1381
1382void Assembler::leave() {
1383  EnsureSpace ensure_space(this);
1384  emit(0xC9);
1385}
1386
1387
1388void Assembler::movb(Register dst, const Operand& src) {
1389  EnsureSpace ensure_space(this);
1390  if (!dst.is_byte_register()) {
1391    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
1392    emit_rex_32(dst, src);
1393  } else {
1394    emit_optional_rex_32(dst, src);
1395  }
1396  emit(0x8A);
1397  emit_operand(dst, src);
1398}
1399
1400
1401void Assembler::movb(Register dst, Immediate imm) {
1402  EnsureSpace ensure_space(this);
1403  if (!dst.is_byte_register()) {
1404    emit_rex_32(dst);
1405  }
1406  emit(0xB0 + dst.low_bits());
1407  emit(imm.value_);
1408}
1409
1410
1411void Assembler::movb(const Operand& dst, Register src) {
1412  EnsureSpace ensure_space(this);
1413  if (!src.is_byte_register()) {
1414    emit_rex_32(src, dst);
1415  } else {
1416    emit_optional_rex_32(src, dst);
1417  }
1418  emit(0x88);
1419  emit_operand(src, dst);
1420}
1421
1422
1423void Assembler::movw(const Operand& dst, Register src) {
1424  EnsureSpace ensure_space(this);
1425  emit(0x66);
1426  emit_optional_rex_32(src, dst);
1427  emit(0x89);
1428  emit_operand(src, dst);
1429}
1430
1431
1432void Assembler::movl(Register dst, const Operand& src) {
1433  EnsureSpace ensure_space(this);
1434  emit_optional_rex_32(dst, src);
1435  emit(0x8B);
1436  emit_operand(dst, src);
1437}
1438
1439
1440void Assembler::movl(Register dst, Register src) {
1441  EnsureSpace ensure_space(this);
1442  if (src.low_bits() == 4) {
1443    emit_optional_rex_32(src, dst);
1444    emit(0x89);
1445    emit_modrm(src, dst);
1446  } else {
1447    emit_optional_rex_32(dst, src);
1448    emit(0x8B);
1449    emit_modrm(dst, src);
1450  }
1451}
1452
1453
1454void Assembler::movl(const Operand& dst, Register src) {
1455  EnsureSpace ensure_space(this);
1456  emit_optional_rex_32(src, dst);
1457  emit(0x89);
1458  emit_operand(src, dst);
1459}
1460
1461
1462void Assembler::movl(const Operand& dst, Immediate value) {
1463  EnsureSpace ensure_space(this);
1464  emit_optional_rex_32(dst);
1465  emit(0xC7);
1466  emit_operand(0x0, dst);
1467  emit(value);
1468}
1469
1470
1471void Assembler::movl(Register dst, Immediate value) {
1472  EnsureSpace ensure_space(this);
1473  emit_optional_rex_32(dst);
1474  emit(0xB8 + dst.low_bits());
1475  emit(value);
1476}
1477
1478
1479void Assembler::movq(Register dst, const Operand& src) {
1480  EnsureSpace ensure_space(this);
1481  emit_rex_64(dst, src);
1482  emit(0x8B);
1483  emit_operand(dst, src);
1484}
1485
1486
1487void Assembler::movq(Register dst, Register src) {
1488  EnsureSpace ensure_space(this);
1489  if (src.low_bits() == 4) {
1490    emit_rex_64(src, dst);
1491    emit(0x89);
1492    emit_modrm(src, dst);
1493  } else {
1494    emit_rex_64(dst, src);
1495    emit(0x8B);
1496    emit_modrm(dst, src);
1497  }
1498}
1499
1500
1501void Assembler::movq(Register dst, Immediate value) {
1502  EnsureSpace ensure_space(this);
1503  emit_rex_64(dst);
1504  emit(0xC7);
1505  emit_modrm(0x0, dst);
1506  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
1507}
1508
1509
1510void Assembler::movq(const Operand& dst, Register src) {
1511  EnsureSpace ensure_space(this);
1512  emit_rex_64(src, dst);
1513  emit(0x89);
1514  emit_operand(src, dst);
1515}
1516
1517
1518void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
1519  // This method must not be used with heap object references. The stored
1520  // address is not GC safe. Use the handle version instead.
1521  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
1522  EnsureSpace ensure_space(this);
1523  emit_rex_64(dst);
1524  emit(0xB8 | dst.low_bits());
1525  emitq(reinterpret_cast<uintptr_t>(value), rmode);
1526}
1527
1528
1529void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
1530  // Non-relocatable values might not need a 64-bit representation.
1531  if (rmode == RelocInfo::NONE) {
1532    // Sadly, there is no zero or sign extending move for 8-bit immediates.
1533    if (is_int32(value)) {
1534      movq(dst, Immediate(static_cast<int32_t>(value)));
1535      return;
1536    } else if (is_uint32(value)) {
1537      movl(dst, Immediate(static_cast<int32_t>(value)));
1538      return;
1539    }
1540    // Value cannot be represented by 32 bits, so do a full 64 bit immediate
1541    // value.
1542  }
1543  EnsureSpace ensure_space(this);
1544  emit_rex_64(dst);
1545  emit(0xB8 | dst.low_bits());
1546  emitq(value, rmode);
1547}
1548
1549
1550void Assembler::movq(Register dst, ExternalReference ref) {
1551  int64_t value = reinterpret_cast<int64_t>(ref.address());
1552  movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
1553}
1554
1555
1556void Assembler::movq(const Operand& dst, Immediate value) {
1557  EnsureSpace ensure_space(this);
1558  emit_rex_64(dst);
1559  emit(0xC7);
1560  emit_operand(0, dst);
1561  emit(value);
1562}
1563
1564
1565// Loads the ip-relative location of the src label into the target location
1566// (as a 32-bit offset sign extended to 64-bit).
1567void Assembler::movl(const Operand& dst, Label* src) {
1568  EnsureSpace ensure_space(this);
1569  emit_optional_rex_32(dst);
1570  emit(0xC7);
1571  emit_operand(0, dst);
1572  if (src->is_bound()) {
1573    int offset = src->pos() - pc_offset() - sizeof(int32_t);
1574    ASSERT(offset <= 0);
1575    emitl(offset);
1576  } else if (src->is_linked()) {
1577    emitl(src->pos());
1578    src->link_to(pc_offset() - sizeof(int32_t));
1579  } else {
1580    ASSERT(src->is_unused());
1581    int32_t current = pc_offset();
1582    emitl(current);
1583    src->link_to(current);
1584  }
1585}
1586
1587
1588void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
1589  // If there is no relocation info, emit the value of the handle efficiently
  // (possibly using less than 8 bytes for the value).
1591  if (mode == RelocInfo::NONE) {
1592    // There is no possible reason to store a heap pointer without relocation
1593    // info, so it must be a smi.
1594    ASSERT(value->IsSmi());
1595    movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
1596  } else {
1597    EnsureSpace ensure_space(this);
1598    ASSERT(value->IsHeapObject());
1599    ASSERT(!HEAP->InNewSpace(*value));
1600    emit_rex_64(dst);
1601    emit(0xB8 | dst.low_bits());
1602    emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
1603  }
1604}
1605
1606
1607void Assembler::movsxbq(Register dst, const Operand& src) {
1608  EnsureSpace ensure_space(this);
1609  emit_rex_64(dst, src);
1610  emit(0x0F);
1611  emit(0xBE);
1612  emit_operand(dst, src);
1613}
1614
1615
1616void Assembler::movsxwq(Register dst, const Operand& src) {
1617  EnsureSpace ensure_space(this);
1618  emit_rex_64(dst, src);
1619  emit(0x0F);
1620  emit(0xBF);
1621  emit_operand(dst, src);
1622}
1623
1624
1625void Assembler::movsxlq(Register dst, Register src) {
1626  EnsureSpace ensure_space(this);
1627  emit_rex_64(dst, src);
1628  emit(0x63);
1629  emit_modrm(dst, src);
1630}
1631
1632
1633void Assembler::movsxlq(Register dst, const Operand& src) {
1634  EnsureSpace ensure_space(this);
1635  emit_rex_64(dst, src);
1636  emit(0x63);
1637  emit_operand(dst, src);
1638}
1639
1640
1641void Assembler::movzxbq(Register dst, const Operand& src) {
1642  EnsureSpace ensure_space(this);
1643  // 32 bit operations zero the top 32 bits of 64 bit registers.  Therefore
1644  // there is no need to make this a 64 bit operation.
1645  emit_optional_rex_32(dst, src);
1646  emit(0x0F);
1647  emit(0xB6);
1648  emit_operand(dst, src);
1649}
1650
1651
1652void Assembler::movzxbl(Register dst, const Operand& src) {
1653  EnsureSpace ensure_space(this);
1654  emit_optional_rex_32(dst, src);
1655  emit(0x0F);
1656  emit(0xB6);
1657  emit_operand(dst, src);
1658}
1659
1660
1661void Assembler::movzxwq(Register dst, const Operand& src) {
1662  EnsureSpace ensure_space(this);
1663  emit_optional_rex_32(dst, src);
1664  emit(0x0F);
1665  emit(0xB7);
1666  emit_operand(dst, src);
1667}
1668
1669
1670void Assembler::movzxwl(Register dst, const Operand& src) {
1671  EnsureSpace ensure_space(this);
1672  emit_optional_rex_32(dst, src);
1673  emit(0x0F);
1674  emit(0xB7);
1675  emit_operand(dst, src);
1676}
1677
1678
1679void Assembler::repmovsb() {
1680  EnsureSpace ensure_space(this);
1681  emit(0xF3);
1682  emit(0xA4);
1683}
1684
1685
1686void Assembler::repmovsw() {
1687  EnsureSpace ensure_space(this);
1688  emit(0x66);  // Operand size override.
1689  emit(0xF3);
1690  emit(0xA4);
1691}
1692
1693
1694void Assembler::repmovsl() {
1695  EnsureSpace ensure_space(this);
1696  emit(0xF3);
1697  emit(0xA5);
1698}
1699
1700
1701void Assembler::repmovsq() {
1702  EnsureSpace ensure_space(this);
1703  emit(0xF3);
1704  emit_rex_64();
1705  emit(0xA5);
1706}
1707
1708
1709void Assembler::mul(Register src) {
1710  EnsureSpace ensure_space(this);
1711  emit_rex_64(src);
1712  emit(0xF7);
1713  emit_modrm(0x4, src);
1714}
1715
1716
1717void Assembler::neg(Register dst) {
1718  EnsureSpace ensure_space(this);
1719  emit_rex_64(dst);
1720  emit(0xF7);
1721  emit_modrm(0x3, dst);
1722}
1723
1724
1725void Assembler::negl(Register dst) {
1726  EnsureSpace ensure_space(this);
1727  emit_optional_rex_32(dst);
1728  emit(0xF7);
1729  emit_modrm(0x3, dst);
1730}
1731
1732
1733void Assembler::neg(const Operand& dst) {
1734  EnsureSpace ensure_space(this);
1735  emit_rex_64(dst);
1736  emit(0xF7);
1737  emit_operand(3, dst);
1738}
1739
1740
1741void Assembler::nop() {
1742  EnsureSpace ensure_space(this);
1743  emit(0x90);
1744}
1745
1746
1747void Assembler::not_(Register dst) {
1748  EnsureSpace ensure_space(this);
1749  emit_rex_64(dst);
1750  emit(0xF7);
1751  emit_modrm(0x2, dst);
1752}
1753
1754
1755void Assembler::not_(const Operand& dst) {
1756  EnsureSpace ensure_space(this);
1757  emit_rex_64(dst);
1758  emit(0xF7);
1759  emit_operand(2, dst);
1760}
1761
1762
1763void Assembler::notl(Register dst) {
1764  EnsureSpace ensure_space(this);
1765  emit_optional_rex_32(dst);
1766  emit(0xF7);
1767  emit_modrm(0x2, dst);
1768}
1769
1770
1771void Assembler::Nop(int n) {
  // The recommended multi-byte sequences of NOP instructions from the Intel 64
1773  // and IA-32 Architectures Software Developer's Manual.
1774  //
1775  // Length   Assembly                                Byte Sequence
1776  // 2 bytes  66 NOP                                  66 90H
1777  // 3 bytes  NOP DWORD ptr [EAX]                     0F 1F 00H
1778  // 4 bytes  NOP DWORD ptr [EAX + 00H]               0F 1F 40 00H
1779  // 5 bytes  NOP DWORD ptr [EAX + EAX*1 + 00H]       0F 1F 44 00 00H
1780  // 6 bytes  66 NOP DWORD ptr [EAX + EAX*1 + 00H]    66 0F 1F 44 00 00H
1781  // 7 bytes  NOP DWORD ptr [EAX + 00000000H]         0F 1F 80 00 00 00 00H
1782  // 8 bytes  NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
1783  // 9 bytes  66 NOP DWORD ptr [EAX + EAX*1 +         66 0F 1F 84 00 00 00 00
1784  //          00000000H]                              00H
1785
1786  EnsureSpace ensure_space(this);
1787  while (n > 0) {
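    // Several cases below fall through intentionally: the 0x66 prefixes of
    // the 9, 10 and 11 byte forms are emitted first, then the 8-byte NOP;
    // likewise case 2 falls into case 1 and case 6 into case 5.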
1788    switch (n) {
1789      case 2:
1790        emit(0x66);
1791      case 1:
1792        emit(0x90);
1793        return;
1794      case 3:
1795        emit(0x0f);
1796        emit(0x1f);
1797        emit(0x00);
1798        return;
1799      case 4:
1800        emit(0x0f);
1801        emit(0x1f);
1802        emit(0x40);
1803        emit(0x00);
1804        return;
1805      case 6:
1806        emit(0x66);
1807      case 5:
1808        emit(0x0f);
1809        emit(0x1f);
1810        emit(0x44);
1811        emit(0x00);
1812        emit(0x00);
1813        return;
1814      case 7:
1815        emit(0x0f);
1816        emit(0x1f);
1817        emit(0x80);
1818        emit(0x00);
1819        emit(0x00);
1820        emit(0x00);
1821        emit(0x00);
1822        return;
1823      default:
1824      case 11:
1825        emit(0x66);
1826        n--;
1827      case 10:
1828        emit(0x66);
1829        n--;
1830      case 9:
1831        emit(0x66);
1832        n--;
1833      case 8:
1834        emit(0x0f);
1835        emit(0x1f);
1836        emit(0x84);
1837        emit(0x00);
1838        emit(0x00);
1839        emit(0x00);
1840        emit(0x00);
1841        emit(0x00);
1842        n -= 8;
1843    }
1844  }
1845}
1846
1847
1848void Assembler::pop(Register dst) {
1849  EnsureSpace ensure_space(this);
1850  emit_optional_rex_32(dst);
1851  emit(0x58 | dst.low_bits());
1852}
1853
1854
1855void Assembler::pop(const Operand& dst) {
1856  EnsureSpace ensure_space(this);
1857  emit_optional_rex_32(dst);
1858  emit(0x8F);
1859  emit_operand(0, dst);
1860}
1861
1862
1863void Assembler::popfq() {
1864  EnsureSpace ensure_space(this);
1865  emit(0x9D);
1866}
1867
1868
1869void Assembler::push(Register src) {
1870  EnsureSpace ensure_space(this);
1871  emit_optional_rex_32(src);
1872  emit(0x50 | src.low_bits());
1873}
1874
1875
1876void Assembler::push(const Operand& src) {
1877  EnsureSpace ensure_space(this);
1878  emit_optional_rex_32(src);
1879  emit(0xFF);
1880  emit_operand(6, src);
1881}
1882
1883
1884void Assembler::push(Immediate value) {
1885  EnsureSpace ensure_space(this);
1886  if (is_int8(value.value_)) {
1887    emit(0x6A);
1888    emit(value.value_);  // Emit low byte of value.
1889  } else {
1890    emit(0x68);
1891    emitl(value.value_);
1892  }
1893}
1894
1895
1896void Assembler::push_imm32(int32_t imm32) {
1897  EnsureSpace ensure_space(this);
1898  emit(0x68);
1899  emitl(imm32);
1900}
1901
1902
1903void Assembler::pushfq() {
1904  EnsureSpace ensure_space(this);
1905  emit(0x9C);
1906}
1907
1908
1909void Assembler::rdtsc() {
1910  EnsureSpace ensure_space(this);
1911  emit(0x0F);
1912  emit(0x31);
1913}
1914
1915
1916void Assembler::ret(int imm16) {
1917  EnsureSpace ensure_space(this);
1918  ASSERT(is_uint16(imm16));
1919  if (imm16 == 0) {
1920    emit(0xC3);
1921  } else {
1922    emit(0xC2);
1923    emit(imm16 & 0xFF);
1924    emit((imm16 >> 8) & 0xFF);
1925  }
1926}
1927
1928
1929void Assembler::setcc(Condition cc, Register reg) {
1930  if (cc > last_condition) {
1931    movb(reg, Immediate(cc == always ? 1 : 0));
1932    return;
1933  }
1934  EnsureSpace ensure_space(this);
1935  ASSERT(is_uint4(cc));
1936  if (!reg.is_byte_register()) {  // Use x64 byte registers, where different.
1937    emit_rex_32(reg);
1938  }
1939  emit(0x0F);
1940  emit(0x90 | cc);
1941  emit_modrm(0x0, reg);
1942}
1943
1944
1945void Assembler::shld(Register dst, Register src) {
1946  EnsureSpace ensure_space(this);
1947  emit_rex_64(src, dst);
1948  emit(0x0F);
1949  emit(0xA5);
1950  emit_modrm(src, dst);
1951}
1952
1953
1954void Assembler::shrd(Register dst, Register src) {
1955  EnsureSpace ensure_space(this);
1956  emit_rex_64(src, dst);
1957  emit(0x0F);
1958  emit(0xAD);
1959  emit_modrm(src, dst);
1960}
1961
1962
1963void Assembler::xchg(Register dst, Register src) {
1964  EnsureSpace ensure_space(this);
1965  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
1966    Register other = src.is(rax) ? dst : src;
1967    emit_rex_64(other);
1968    emit(0x90 | other.low_bits());
1969  } else if (dst.low_bits() == 4) {
1970    emit_rex_64(dst, src);
1971    emit(0x87);
1972    emit_modrm(dst, src);
1973  } else {
1974    emit_rex_64(src, dst);
1975    emit(0x87);
1976    emit_modrm(src, dst);
1977  }
1978}
1979
1980
1981void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
1982  EnsureSpace ensure_space(this);
1983  emit(0x48);  // REX.W
1984  emit(0xA3);
1985  emitq(reinterpret_cast<uintptr_t>(dst), mode);
1986}
1987
1988
1989void Assembler::store_rax(ExternalReference ref) {
1990  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
1991}
1992
1993
1994void Assembler::testb(Register dst, Register src) {
1995  EnsureSpace ensure_space(this);
1996  if (src.low_bits() == 4) {
1997    emit_rex_32(src, dst);
1998    emit(0x84);
1999    emit_modrm(src, dst);
2000  } else {
2001    if (!dst.is_byte_register() || !src.is_byte_register()) {
2002      // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
2003      emit_rex_32(dst, src);
2004    }
2005    emit(0x84);
2006    emit_modrm(dst, src);
2007  }
2008}
2009
2010
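// testb against an immediate uses the short A8 ib form (test al, imm8) when
// the register is rax, and the general F6 /0 ib form otherwise.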
2011void Assembler::testb(Register reg, Immediate mask) {
2012  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2013  EnsureSpace ensure_space(this);
2014  if (reg.is(rax)) {
2015    emit(0xA8);
2016    emit(mask.value_);  // Low byte emitted.
2017  } else {
2018    if (!reg.is_byte_register()) {
2019      // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
2020      emit_rex_32(reg);
2021    }
2022    emit(0xF6);
2023    emit_modrm(0x0, reg);
2024    emit(mask.value_);  // Low byte emitted.
2025  }
2026}
2027
2028
2029void Assembler::testb(const Operand& op, Immediate mask) {
2030  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
2031  EnsureSpace ensure_space(this);
2032  emit_optional_rex_32(rax, op);
2033  emit(0xF6);
2034  emit_operand(rax, op);  // Operation code 0
2035  emit(mask.value_);  // Low byte emitted.
2036}
2037
2038
2039void Assembler::testb(const Operand& op, Register reg) {
2040  EnsureSpace ensure_space(this);
2041  if (!reg.is_byte_register()) {
2042    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
2043    emit_rex_32(reg, op);
2044  } else {
2045    emit_optional_rex_32(reg, op);
2046  }
2047  emit(0x84);
2048  emit_operand(reg, op);
2049}
2050
2051
2052void Assembler::testl(Register dst, Register src) {
2053  EnsureSpace ensure_space(this);
2054  if (src.low_bits() == 4) {
2055    emit_optional_rex_32(src, dst);
2056    emit(0x85);
2057    emit_modrm(src, dst);
2058  } else {
2059    emit_optional_rex_32(dst, src);
2060    emit(0x85);
2061    emit_modrm(dst, src);
2062  }
2063}
2064
2065
2066void Assembler::testl(Register reg, Immediate mask) {
2067  // testl with a mask that fits in the low byte is exactly testb.
2068  if (is_uint8(mask.value_)) {
2069    testb(reg, mask);
2070    return;
2071  }
2072  EnsureSpace ensure_space(this);
2073  if (reg.is(rax)) {
2074    emit(0xA9);
2075    emit(mask);
2076  } else {
2077    emit_optional_rex_32(rax, reg);
2078    emit(0xF7);
2079    emit_modrm(0x0, reg);
2080    emit(mask);
2081  }
2082}
2083
2084
2085void Assembler::testl(const Operand& op, Immediate mask) {
2086  // testl with a mask that fits in the low byte is exactly testb.
2087  if (is_uint8(mask.value_)) {
2088    testb(op, mask);
2089    return;
2090  }
2091  EnsureSpace ensure_space(this);
2092  emit_optional_rex_32(rax, op);
2093  emit(0xF7);
2094  emit_operand(rax, op);  // Operation code 0
2095  emit(mask);
2096}
2097
2098
2099void Assembler::testq(const Operand& op, Register reg) {
2100  EnsureSpace ensure_space(this);
2101  emit_rex_64(reg, op);
2102  emit(0x85);
2103  emit_operand(reg, op);
2104}
2105
2106
2107void Assembler::testq(Register dst, Register src) {
2108  EnsureSpace ensure_space(this);
2109  if (src.low_bits() == 4) {
2110    emit_rex_64(src, dst);
2111    emit(0x85);
2112    emit_modrm(src, dst);
2113  } else {
2114    emit_rex_64(dst, src);
2115    emit(0x85);
2116    emit_modrm(dst, src);
2117  }
2118}
2119
2120
2121void Assembler::testq(Register dst, Immediate mask) {
2122  EnsureSpace ensure_space(this);
2123  if (dst.is(rax)) {
2124    emit_rex_64();
2125    emit(0xA9);
2126    emit(mask);
2127  } else {
2128    emit_rex_64(dst);
2129    emit(0xF7);
2130    emit_modrm(0, dst);
2131    emit(mask);
2132  }
2133}
2134
2135
2136// FPU instructions.
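// A typical x87 sequence built from the helpers below (illustrative only,
// assuming two doubles spilled at rsp and rsp + 8):
//   __ fld_d(Operand(rsp, 0));   // st0 = a
//   __ fld_d(Operand(rsp, 8));   // st0 = b, st1 = a
//   __ faddp(1);                 // st0 = a + b
//   __ fstp_d(Operand(rsp, 0));  // store the result and pop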
2137
2138
2139void Assembler::fld(int i) {
2140  EnsureSpace ensure_space(this);
2141  emit_farith(0xD9, 0xC0, i);
2142}
2143
2144
2145void Assembler::fld1() {
2146  EnsureSpace ensure_space(this);
2147  emit(0xD9);
2148  emit(0xE8);
2149}
2150
2151
2152void Assembler::fldz() {
2153  EnsureSpace ensure_space(this);
2154  emit(0xD9);
2155  emit(0xEE);
2156}
2157
2158
2159void Assembler::fldpi() {
2160  EnsureSpace ensure_space(this);
2161  emit(0xD9);
2162  emit(0xEB);
2163}
2164
2165
2166void Assembler::fldln2() {
2167  EnsureSpace ensure_space(this);
2168  emit(0xD9);
2169  emit(0xED);
2170}
2171
2172
2173void Assembler::fld_s(const Operand& adr) {
2174  EnsureSpace ensure_space(this);
2175  emit_optional_rex_32(adr);
2176  emit(0xD9);
2177  emit_operand(0, adr);
2178}
2179
2180
2181void Assembler::fld_d(const Operand& adr) {
2182  EnsureSpace ensure_space(this);
2183  emit_optional_rex_32(adr);
2184  emit(0xDD);
2185  emit_operand(0, adr);
2186}
2187
2188
2189void Assembler::fstp_s(const Operand& adr) {
2190  EnsureSpace ensure_space(this);
2191  emit_optional_rex_32(adr);
2192  emit(0xD9);
2193  emit_operand(3, adr);
2194}
2195
2196
2197void Assembler::fstp_d(const Operand& adr) {
2198  EnsureSpace ensure_space(this);
2199  emit_optional_rex_32(adr);
2200  emit(0xDD);
2201  emit_operand(3, adr);
2202}
2203
2204
2205void Assembler::fstp(int index) {
2206  ASSERT(is_uint3(index));
2207  EnsureSpace ensure_space(this);
2208  emit_farith(0xDD, 0xD8, index);
2209}
2210
2211
2212void Assembler::fild_s(const Operand& adr) {
2213  EnsureSpace ensure_space(this);
2214  emit_optional_rex_32(adr);
2215  emit(0xDB);
2216  emit_operand(0, adr);
2217}
2218
2219
2220void Assembler::fild_d(const Operand& adr) {
2221  EnsureSpace ensure_space(this);
2222  emit_optional_rex_32(adr);
2223  emit(0xDF);
2224  emit_operand(5, adr);
2225}
2226
2227
2228void Assembler::fistp_s(const Operand& adr) {
2229  EnsureSpace ensure_space(this);
2230  emit_optional_rex_32(adr);
2231  emit(0xDB);
2232  emit_operand(3, adr);
2233}
2234
2235
2236void Assembler::fisttp_s(const Operand& adr) {
2237  ASSERT(CpuFeatures::IsEnabled(SSE3));
2238  EnsureSpace ensure_space(this);
2239  emit_optional_rex_32(adr);
2240  emit(0xDB);
2241  emit_operand(1, adr);
2242}
2243
2244
2245void Assembler::fisttp_d(const Operand& adr) {
2246  ASSERT(CpuFeatures::IsEnabled(SSE3));
2247  EnsureSpace ensure_space(this);
2248  emit_optional_rex_32(adr);
2249  emit(0xDD);
2250  emit_operand(1, adr);
2251}
2252
2253
2254void Assembler::fist_s(const Operand& adr) {
2255  EnsureSpace ensure_space(this);
2256  emit_optional_rex_32(adr);
2257  emit(0xDB);
2258  emit_operand(2, adr);
2259}
2260
2261
2262void Assembler::fistp_d(const Operand& adr) {
2263  EnsureSpace ensure_space(this);
2264  emit_optional_rex_32(adr);
2265  emit(0xDF);
2266  emit_operand(7, adr);
2267}
2268
2269
2270void Assembler::fabs() {
2271  EnsureSpace ensure_space(this);
2272  emit(0xD9);
2273  emit(0xE1);
2274}
2275
2276
2277void Assembler::fchs() {
2278  EnsureSpace ensure_space(this);
2279  emit(0xD9);
2280  emit(0xE0);
2281}
2282
2283
2284void Assembler::fcos() {
2285  EnsureSpace ensure_space(this);
2286  emit(0xD9);
2287  emit(0xFF);
2288}
2289
2290
2291void Assembler::fsin() {
2292  EnsureSpace ensure_space(this);
2293  emit(0xD9);
2294  emit(0xFE);
2295}
2296
2297
2298void Assembler::fptan() {
2299  EnsureSpace ensure_space(this);
2300  emit(0xD9);
2301  emit(0xF2);
2302}
2303
2304
2305void Assembler::fyl2x() {
2306  EnsureSpace ensure_space(this);
2307  emit(0xD9);
2308  emit(0xF1);
2309}
2310
2311
2312void Assembler::f2xm1() {
2313  EnsureSpace ensure_space(this);
2314  emit(0xD9);
2315  emit(0xF0);
2316}
2317
2318
2319void Assembler::fscale() {
2320  EnsureSpace ensure_space(this);
2321  emit(0xD9);
2322  emit(0xFD);
2323}
2324
2325
2326void Assembler::fninit() {
2327  EnsureSpace ensure_space(this);
2328  emit(0xDB);
2329  emit(0xE3);
2330}
2331
2332
2333void Assembler::fadd(int i) {
2334  EnsureSpace ensure_space(this);
2335  emit_farith(0xDC, 0xC0, i);
2336}
2337
2338
2339void Assembler::fsub(int i) {
2340  EnsureSpace ensure_space(this);
2341  emit_farith(0xDC, 0xE8, i);
2342}
2343
2344
2345void Assembler::fisub_s(const Operand& adr) {
2346  EnsureSpace ensure_space(this);
2347  emit_optional_rex_32(adr);
2348  emit(0xDA);
2349  emit_operand(4, adr);
2350}
2351
2352
2353void Assembler::fmul(int i) {
2354  EnsureSpace ensure_space(this);
2355  emit_farith(0xDC, 0xC8, i);
2356}
2357
2358
2359void Assembler::fdiv(int i) {
2360  EnsureSpace ensure_space(this);
2361  emit_farith(0xDC, 0xF8, i);
2362}
2363
2364
2365void Assembler::faddp(int i) {
2366  EnsureSpace ensure_space(this);
2367  emit_farith(0xDE, 0xC0, i);
2368}
2369
2370
2371void Assembler::fsubp(int i) {
2372  EnsureSpace ensure_space(this);
2373  emit_farith(0xDE, 0xE8, i);
2374}
2375
2376
2377void Assembler::fsubrp(int i) {
2378  EnsureSpace ensure_space(this);
2379  emit_farith(0xDE, 0xE0, i);
2380}
2381
2382
2383void Assembler::fmulp(int i) {
2384  EnsureSpace ensure_space(this);
2385  emit_farith(0xDE, 0xC8, i);
2386}
2387
2388
2389void Assembler::fdivp(int i) {
2390  EnsureSpace ensure_space(this);
2391  emit_farith(0xDE, 0xF8, i);
2392}
2393
2394
2395void Assembler::fprem() {
2396  EnsureSpace ensure_space(this);
2397  emit(0xD9);
2398  emit(0xF8);
2399}
2400
2401
2402void Assembler::fprem1() {
2403  EnsureSpace ensure_space(this);
2404  emit(0xD9);
2405  emit(0xF5);
2406}
2407
2408
2409void Assembler::fxch(int i) {
2410  EnsureSpace ensure_space(this);
2411  emit_farith(0xD9, 0xC8, i);
2412}
2413
2414
2415void Assembler::fincstp() {
2416  EnsureSpace ensure_space(this);
2417  emit(0xD9);
2418  emit(0xF7);
2419}
2420
2421
2422void Assembler::ffree(int i) {
2423  EnsureSpace ensure_space(this);
2424  emit_farith(0xDD, 0xC0, i);
2425}
2426
2427
2428void Assembler::ftst() {
2429  EnsureSpace ensure_space(this);
2430  emit(0xD9);
2431  emit(0xE4);
2432}
2433
2434
2435void Assembler::fucomp(int i) {
2436  EnsureSpace ensure_space(this);
2437  emit_farith(0xDD, 0xE8, i);
2438}
2439
2440
2441void Assembler::fucompp() {
2442  EnsureSpace ensure_space(this);
2443  emit(0xDA);
2444  emit(0xE9);
2445}
2446
2447
2448void Assembler::fucomi(int i) {
2449  EnsureSpace ensure_space(this);
2450  emit(0xDB);
2451  emit(0xE8 + i);
2452}
2453
2454
2455void Assembler::fucomip() {
2456  EnsureSpace ensure_space(this);
2457  emit(0xDF);
2458  emit(0xE9);
2459}
2460
2461
2462void Assembler::fcompp() {
2463  EnsureSpace ensure_space(this);
2464  emit(0xDE);
2465  emit(0xD9);
2466}
2467
2468
2469void Assembler::fnstsw_ax() {
2470  EnsureSpace ensure_space(this);
2471  emit(0xDF);
2472  emit(0xE0);
2473}
2474
2475
2476void Assembler::fwait() {
2477  EnsureSpace ensure_space(this);
2478  emit(0x9B);
2479}
2480
2481
2482void Assembler::frndint() {
2483  EnsureSpace ensure_space(this);
2484  emit(0xD9);
2485  emit(0xFC);
2486}
2487
2488
2489void Assembler::fnclex() {
2490  EnsureSpace ensure_space(this);
2491  emit(0xDB);
2492  emit(0xE2);
2493}
2494
2495
2496void Assembler::sahf() {
2497  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs have sahf
2498  // in 64-bit mode; test CPUID for it.
2499  EnsureSpace ensure_space(this);
2500  emit(0x9E);
2501}
2502
2503
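// Helper for x87 instructions whose operand is a stack register: the encoding
// is the opcode byte b1 followed by b2 + i, where i is the st(i) index (0..7).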
2504void Assembler::emit_farith(int b1, int b2, int i) {
2505  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
2506  ASSERT(is_uint3(i));  // illegal stack offset
2507  emit(b1);
2508  emit(b2 + i);
2509}
2510
2511// SSE 2 operations.
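//
// Encoding convention used below: a mandatory prefix generally selects the
// data type (0x66 for double/packed-integer forms, 0xF2 for scalar double,
// 0xF3 for scalar single, none for packed single), followed by an optional
// REX prefix when xmm8-xmm15 or extended general registers are involved
// (or REX.W for the 64-bit variants), the 0x0F escape byte, the opcode, and
// a ModR/M-encoded operand.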
2512
2513void Assembler::movd(XMMRegister dst, Register src) {
2514  EnsureSpace ensure_space(this);
2515  emit(0x66);
2516  emit_optional_rex_32(dst, src);
2517  emit(0x0F);
2518  emit(0x6E);
2519  emit_sse_operand(dst, src);
2520}
2521
2522
2523void Assembler::movd(Register dst, XMMRegister src) {
2524  EnsureSpace ensure_space(this);
2525  emit(0x66);
2526  emit_optional_rex_32(src, dst);
2527  emit(0x0F);
2528  emit(0x7E);
2529  emit_sse_operand(src, dst);
2530}
2531
2532
2533void Assembler::movq(XMMRegister dst, Register src) {
2534  EnsureSpace ensure_space(this);
2535  emit(0x66);
2536  emit_rex_64(dst, src);
2537  emit(0x0F);
2538  emit(0x6E);
2539  emit_sse_operand(dst, src);
2540}
2541
2542
2543void Assembler::movq(Register dst, XMMRegister src) {
2544  EnsureSpace ensure_space(this);
2545  emit(0x66);
2546  emit_rex_64(src, dst);
2547  emit(0x0F);
2548  emit(0x7E);
2549  emit_sse_operand(src, dst);
2550}
2551
2552
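// A register-to-register movq between XMM registers has two equivalent
// encodings, the F3 0F 7E load form (dst, src) and the 66 0F D6 store form
// (src, dst); both copy the low 64 bits and zero the upper half of the
// destination.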
2553void Assembler::movq(XMMRegister dst, XMMRegister src) {
2554  EnsureSpace ensure_space(this);
2555  if (dst.low_bits() == 4) {
2556    // Avoid unnecessary SIB byte.
2557    emit(0xF3);
2558    emit_optional_rex_32(dst, src);
2559    emit(0x0F);
2560    emit(0x7E);
2561    emit_sse_operand(dst, src);
2562  } else {
2563    emit(0x66);
2564    emit_optional_rex_32(src, dst);
2565    emit(0x0F);
2566    emit(0xD6);
2567    emit_sse_operand(src, dst);
2568  }
2569}
2570
2571void Assembler::movdqa(const Operand& dst, XMMRegister src) {
2572  EnsureSpace ensure_space(this);
2573  emit(0x66);
2574  emit_rex_64(src, dst);
2575  emit(0x0F);
2576  emit(0x7F);
2577  emit_sse_operand(src, dst);
2578}
2579
2580
2581void Assembler::movdqa(XMMRegister dst, const Operand& src) {
2582  EnsureSpace ensure_space(this);
2583  emit(0x66);
2584  emit_rex_64(dst, src);
2585  emit(0x0F);
2586  emit(0x6F);
2587  emit_sse_operand(dst, src);
2588}
2589
2590
2591void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
2592  ASSERT(CpuFeatures::IsSupported(SSE4_1));
2593  ASSERT(is_uint8(imm8));
2594  EnsureSpace ensure_space(this);
2595  emit(0x66);
2596  emit_optional_rex_32(dst, src);
2597  emit(0x0F);
2598  emit(0x3A);
2599  emit(0x17);
2600  emit_sse_operand(dst, src);
2601  emit(imm8);
2602}
2603
2604
2605void Assembler::movsd(const Operand& dst, XMMRegister src) {
2606  EnsureSpace ensure_space(this);
2607  emit(0xF2);  // double
2608  emit_optional_rex_32(src, dst);
2609  emit(0x0F);
2610  emit(0x11);  // store
2611  emit_sse_operand(src, dst);
2612}
2613
2614
2615void Assembler::movsd(XMMRegister dst, XMMRegister src) {
2616  EnsureSpace ensure_space(this);
2617  emit(0xF2);  // double
2618  emit_optional_rex_32(dst, src);
2619  emit(0x0F);
2620  emit(0x10);  // load
2621  emit_sse_operand(dst, src);
2622}
2623
2624
2625void Assembler::movsd(XMMRegister dst, const Operand& src) {
2626  EnsureSpace ensure_space(this);
2627  emit(0xF2);  // double
2628  emit_optional_rex_32(dst, src);
2629  emit(0x0F);
2630  emit(0x10);  // load
2631  emit_sse_operand(dst, src);
2632}
2633
2634
2635void Assembler::movaps(XMMRegister dst, XMMRegister src) {
2636  EnsureSpace ensure_space(this);
2637  if (src.low_bits() == 4) {
2638    // Try to avoid an unnecessary SIB byte.
2639    emit_optional_rex_32(src, dst);
2640    emit(0x0F);
2641    emit(0x29);
2642    emit_sse_operand(src, dst);
2643  } else {
2644    emit_optional_rex_32(dst, src);
2645    emit(0x0F);
2646    emit(0x28);
2647    emit_sse_operand(dst, src);
2648  }
2649}
2650
2651
2652void Assembler::movapd(XMMRegister dst, XMMRegister src) {
2653  EnsureSpace ensure_space(this);
2654  if (src.low_bits() == 4) {
2655    // Try to avoid an unnecessary SIB byte.
2656    emit(0x66);
2657    emit_optional_rex_32(src, dst);
2658    emit(0x0F);
2659    emit(0x29);
2660    emit_sse_operand(src, dst);
2661  } else {
2662    emit(0x66);
2663    emit_optional_rex_32(dst, src);
2664    emit(0x0F);
2665    emit(0x28);
2666    emit_sse_operand(dst, src);
2667  }
2668}
2669
2670
2671void Assembler::movss(XMMRegister dst, const Operand& src) {
2672  EnsureSpace ensure_space(this);
2673  emit(0xF3);  // single
2674  emit_optional_rex_32(dst, src);
2675  emit(0x0F);
2676  emit(0x10);  // load
2677  emit_sse_operand(dst, src);
2678}
2679
2680
2681void Assembler::movss(const Operand& src, XMMRegister dst) {
2682  EnsureSpace ensure_space(this);
2683  emit(0xF3);  // single
2684  emit_optional_rex_32(dst, src);
2685  emit(0x0F);
2686  emit(0x11);  // store
2687  emit_sse_operand(dst, src);
2688}
2689
2690
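// In the cvt helpers below, 'l' denotes a 32-bit integer operand and 'q' a
// 64-bit one (hence the REX.W prefix in the q variants); the extra 't' in
// cvtt marks truncating conversions, and si/sd/ss keep their usual
// scalar-integer/double/single meanings.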
2691void Assembler::cvttss2si(Register dst, const Operand& src) {
2692  EnsureSpace ensure_space(this);
2693  emit(0xF3);
2694  emit_optional_rex_32(dst, src);
2695  emit(0x0F);
2696  emit(0x2C);
2697  emit_operand(dst, src);
2698}
2699
2700
2701void Assembler::cvttss2si(Register dst, XMMRegister src) {
2702  EnsureSpace ensure_space(this);
2703  emit(0xF3);
2704  emit_optional_rex_32(dst, src);
2705  emit(0x0F);
2706  emit(0x2C);
2707  emit_sse_operand(dst, src);
2708}
2709
2710
2711void Assembler::cvttsd2si(Register dst, const Operand& src) {
2712  EnsureSpace ensure_space(this);
2713  emit(0xF2);
2714  emit_optional_rex_32(dst, src);
2715  emit(0x0F);
2716  emit(0x2C);
2717  emit_operand(dst, src);
2718}
2719
2720
2721void Assembler::cvttsd2si(Register dst, XMMRegister src) {
2722  EnsureSpace ensure_space(this);
2723  emit(0xF2);
2724  emit_optional_rex_32(dst, src);
2725  emit(0x0F);
2726  emit(0x2C);
2727  emit_sse_operand(dst, src);
2728}
2729
2730
2731void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
2732  EnsureSpace ensure_space(this);
2733  emit(0xF2);
2734  emit_rex_64(dst, src);
2735  emit(0x0F);
2736  emit(0x2C);
2737  emit_sse_operand(dst, src);
2738}
2739
2740
2741void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
2742  EnsureSpace ensure_space(this);
2743  emit(0xF2);
2744  emit_optional_rex_32(dst, src);
2745  emit(0x0F);
2746  emit(0x2A);
2747  emit_sse_operand(dst, src);
2748}
2749
2750
2751void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
2752  EnsureSpace ensure_space(this);
2753  emit(0xF2);
2754  emit_optional_rex_32(dst, src);
2755  emit(0x0F);
2756  emit(0x2A);
2757  emit_sse_operand(dst, src);
2758}
2759
2760
2761void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
2762  EnsureSpace ensure_space(this);
2763  emit(0xF3);
2764  emit_optional_rex_32(dst, src);
2765  emit(0x0F);
2766  emit(0x2A);
2767  emit_sse_operand(dst, src);
2768}
2769
2770
2771void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
2772  EnsureSpace ensure_space(this);
2773  emit(0xF2);
2774  emit_rex_64(dst, src);
2775  emit(0x0F);
2776  emit(0x2A);
2777  emit_sse_operand(dst, src);
2778}
2779
2780
2781void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
2782  EnsureSpace ensure_space(this);
2783  emit(0xF3);
2784  emit_optional_rex_32(dst, src);
2785  emit(0x0F);
2786  emit(0x5A);
2787  emit_sse_operand(dst, src);
2788}
2789
2790
2791void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
2792  EnsureSpace ensure_space(this);
2793  emit(0xF3);
2794  emit_optional_rex_32(dst, src);
2795  emit(0x0F);
2796  emit(0x5A);
2797  emit_sse_operand(dst, src);
2798}
2799
2800
2801void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
2802  EnsureSpace ensure_space(this);
2803  emit(0xF2);
2804  emit_optional_rex_32(dst, src);
2805  emit(0x0F);
2806  emit(0x5A);
2807  emit_sse_operand(dst, src);
2808}
2809
2810
2811void Assembler::cvtsd2si(Register dst, XMMRegister src) {
2812  EnsureSpace ensure_space(this);
2813  emit(0xF2);
2814  emit_optional_rex_32(dst, src);
2815  emit(0x0F);
2816  emit(0x2D);
2817  emit_sse_operand(dst, src);
2818}
2819
2820
2821void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
2822  EnsureSpace ensure_space(this);
2823  emit(0xF2);
2824  emit_rex_64(dst, src);
2825  emit(0x0F);
2826  emit(0x2D);
2827  emit_sse_operand(dst, src);
2828}
2829
2830
2831void Assembler::addsd(XMMRegister dst, XMMRegister src) {
2832  EnsureSpace ensure_space(this);
2833  emit(0xF2);
2834  emit_optional_rex_32(dst, src);
2835  emit(0x0F);
2836  emit(0x58);
2837  emit_sse_operand(dst, src);
2838}
2839
2840
2841void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
2842  EnsureSpace ensure_space(this);
2843  emit(0xF2);
2844  emit_optional_rex_32(dst, src);
2845  emit(0x0F);
2846  emit(0x59);
2847  emit_sse_operand(dst, src);
2848}
2849
2850
2851void Assembler::subsd(XMMRegister dst, XMMRegister src) {
2852  EnsureSpace ensure_space(this);
2853  emit(0xF2);
2854  emit_optional_rex_32(dst, src);
2855  emit(0x0F);
2856  emit(0x5C);
2857  emit_sse_operand(dst, src);
2858}
2859
2860
2861void Assembler::divsd(XMMRegister dst, XMMRegister src) {
2862  EnsureSpace ensure_space(this);
2863  emit(0xF2);
2864  emit_optional_rex_32(dst, src);
2865  emit(0x0F);
2866  emit(0x5E);
2867  emit_sse_operand(dst, src);
2868}
2869
2870
2871void Assembler::andpd(XMMRegister dst, XMMRegister src) {
2872  EnsureSpace ensure_space(this);
2873  emit(0x66);
2874  emit_optional_rex_32(dst, src);
2875  emit(0x0F);
2876  emit(0x54);
2877  emit_sse_operand(dst, src);
2878}
2879
2880
2881void Assembler::orpd(XMMRegister dst, XMMRegister src) {
2882  EnsureSpace ensure_space(this);
2883  emit(0x66);
2884  emit_optional_rex_32(dst, src);
2885  emit(0x0F);
2886  emit(0x56);
2887  emit_sse_operand(dst, src);
2888}
2889
2890
2891void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
2892  EnsureSpace ensure_space(this);
2893  emit(0x66);
2894  emit_optional_rex_32(dst, src);
2895  emit(0x0F);
2896  emit(0x57);
2897  emit_sse_operand(dst, src);
2898}
2899
2900
2901void Assembler::xorps(XMMRegister dst, XMMRegister src) {
2902  EnsureSpace ensure_space(this);
2903  emit_optional_rex_32(dst, src);
2904  emit(0x0F);
2905  emit(0x57);
2906  emit_sse_operand(dst, src);
2907}
2908
2909
2910void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
2911  EnsureSpace ensure_space(this);
2912  emit(0xF2);
2913  emit_optional_rex_32(dst, src);
2914  emit(0x0F);
2915  emit(0x51);
2916  emit_sse_operand(dst, src);
2917}
2918
2919
2920void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
2921  EnsureSpace ensure_space(this);
2922  emit(0x66);
2923  emit_optional_rex_32(dst, src);
2924  emit(0x0F);
2925  emit(0x2E);
2926  emit_sse_operand(dst, src);
2927}
2928
2929
2930void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
2931  EnsureSpace ensure_space(this);
2932  emit(0x66);
2933  emit_optional_rex_32(dst, src);
2934  emit(0x0F);
2935  emit(0x2E);
2936  emit_sse_operand(dst, src);
2937}
2938
2939
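// roundsd (SSE4.1) takes an 8-bit immediate: bits 1:0 select the rounding
// mode and bit 3 suppresses the precision (inexact) exception, which is what
// or'ing in 0x8 below does.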
2940void Assembler::roundsd(XMMRegister dst, XMMRegister src,
2941                        Assembler::RoundingMode mode) {
2942  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
2943  EnsureSpace ensure_space(this);
2944  emit(0x66);
2945  emit_optional_rex_32(dst, src);
2946  emit(0x0F);
2947  emit(0x3A);
2948  emit(0x0B);
2949  emit_sse_operand(dst, src);
2950  // Mask precision exception.
2951  emit(static_cast<byte>(mode) | 0x8);
2952}
2953
2954
2955void Assembler::movmskpd(Register dst, XMMRegister src) {
2956  EnsureSpace ensure_space(this);
2957  emit(0x66);
2958  emit_optional_rex_32(dst, src);
2959  emit(0x0F);
2960  emit(0x50);
2961  emit_sse_operand(dst, src);
2962}
2963
2964
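// emit_sse_operand either defers to the general operand encoder (for memory
// operands) or, for the register-register forms, emits a ModR/M byte with
// mod = 11: 0xC0 | reg << 3 | r/m, built from the low three bits of each
// register code; the high bit, when set, travels in the REX prefix.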
2965void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
2966  Register ireg = { reg.code() };
2967  emit_operand(ireg, adr);
2968}
2969
2970
2971void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
2972  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2973}
2974
2975void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
2976  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2977}
2978
2979void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
2980  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
2981}
2982
2983
2984void Assembler::db(uint8_t data) {
2985  EnsureSpace ensure_space(this);
2986  emit(data);
2987}
2988
2989
2990void Assembler::dd(uint32_t data) {
2991  EnsureSpace ensure_space(this);
2992  emitl(data);
2993}
2994
2995
2996// Relocation information implementations.
2997
2998void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2999  ASSERT(rmode != RelocInfo::NONE);
3000  // Don't record external references unless the heap will be serialized.
3001  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
3002#ifdef DEBUG
3003    if (!Serializer::enabled()) {
3004      Serializer::TooLateToEnableNow();
3005    }
3006#endif
3007    if (!Serializer::enabled() && !emit_debug_code()) {
3008      return;
3009    }
3010  }
3011  RelocInfo rinfo(pc_, rmode, data, NULL);
3012  reloc_info_writer.Write(&rinfo);
3013}
3014
3015void Assembler::RecordJSReturn() {
3016  positions_recorder()->WriteRecordedPositions();
3017  EnsureSpace ensure_space(this);
3018  RecordRelocInfo(RelocInfo::JS_RETURN);
3019}
3020
3021
3022void Assembler::RecordDebugBreakSlot() {
3023  positions_recorder()->WriteRecordedPositions();
3024  EnsureSpace ensure_space(this);
3025  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
3026}
3027
3028
3029void Assembler::RecordComment(const char* msg, bool force) {
3030  if (FLAG_code_comments || force) {
3031    EnsureSpace ensure_space(this);
3032    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
3033  }
3034}
3035
3036
3037const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
3038                                  1 << RelocInfo::INTERNAL_REFERENCE;
3039
3040
3041bool RelocInfo::IsCodedSpecially() {
3042  // The deserializer needs to know whether a pointer is specially coded.  Being
3043  // specially coded on x64 means that it is a relative 32-bit address, as used
3044  // by branch instructions.
3045  return (1 << rmode_) & kApplyMask;
3046}
3047
3048} }  // namespace v8::internal
3049
3050#endif  // V8_TARGET_ARCH_X64
3051