// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "macro-assembler.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Implementation of CpuFeatures


#ifdef DEBUG
bool CpuFeatures::initialized_ = false;
#endif
uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
uint64_t CpuFeatures::found_by_runtime_probing_only_ = 0;


ExternalReference ExternalReference::cpu_features() {
  ASSERT(CpuFeatures::initialized_);
  return ExternalReference(&CpuFeatures::supported_);
}


void CpuFeatures::Probe() {
  ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
#ifdef DEBUG
  initialized_ = true;
#endif
  supported_ = kDefaultCpuFeatures;
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  const int kBufferSize = 4 * KB;
  VirtualMemory* memory = new VirtualMemory(kBufferSize);
  if (!memory->IsReserved()) {
    delete memory;
    return;
  }
  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
    delete memory;
    return;
  }

  Assembler assm(NULL, memory->address(), kBufferSize);
  Label cpuid, done;
#define __ assm.
  // Save old rsp, since we are going to modify the stack.
  __ push(rbp);
  __ pushfq();
  __ push(rdi);
  __ push(rcx);
  __ push(rbx);
  __ movq(rbp, rsp);

  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
  __ pushfq();
  __ pop(rax);
  __ movq(rdx, rax);
  __ xor_(rax, Immediate(0x200000));  // Flip bit 21.
  __ push(rax);
  __ popfq();
  __ pushfq();
  __ pop(rax);
  __ xor_(rax, rdx);  // Different if CPUID is supported.
  __ j(not_zero, &cpuid);

  // CPUID not supported. Clear the supported features in rax.
  __ xor_(rax, rax);
  __ jmp(&done);

  // Invoke CPUID with 1 in eax to get feature information in
  // ecx:edx. Temporarily enable CPUID support because we know it's
  // safe here.
  __ bind(&cpuid);
  __ movl(rax, Immediate(1));
  supported_ = kDefaultCpuFeatures | (1 << CPUID);
  { CpuFeatureScope fscope(&assm, CPUID);
    __ cpuid();
    // Move the result from ecx:edx to rdi.
    __ movl(rdi, rdx);  // Zero-extended to 64 bits.
    __ shl(rcx, Immediate(32));
    __ or_(rdi, rcx);

    // Get the sahf supported flag, from CPUID(0x80000001)
    __ movq(rax, 0x80000001, RelocInfo::NONE64);
    __ cpuid();
  }
  supported_ = kDefaultCpuFeatures;

  // Put the CPU flags in rax.
  // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
  __ movl(rax, Immediate(1));
  __ and_(rcx, rax);  // Bit 0 is set if SAHF instruction supported.
  __ not_(rax);
  __ and_(rax, rdi);
  __ or_(rax, rcx);
  __ or_(rax, Immediate(1 << CPUID));

  // Done.
  __ bind(&done);
  __ movq(rsp, rbp);
  __ pop(rbx);
  __ pop(rcx);
  __ pop(rdi);
  __ popfq();
  __ pop(rbp);
  __ ret(0);
#undef __

  typedef uint64_t (*F0)();
  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));

  uint64_t probed_features = probe();
  uint64_t platform_features = OS::CpuFeaturesImpliedByPlatform();
  supported_ = probed_features | platform_features;
  found_by_runtime_probing_only_
      = probed_features & ~kDefaultCpuFeatures & ~platform_features;
  // CPUID and CMOV must always be available on an x64 CPU.
  ASSERT(IsSupported(CPUID));
  ASSERT(IsSupported(CMOV));

  delete memory;
}
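
// A minimal usage sketch of the probed feature bits (the feature name used
// here is an assumption; see the CpuFeature enum for the real values): code
// generators test the bits recorded by Probe() before emitting optional
// instructions, e.g.
//
//   if (CpuFeatures::IsSupported(SSE4_1)) {
//     CpuFeatureScope use_sse41(&masm, SSE4_1);
//     // ... emit SSE4.1 instructions here ...
//   }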


// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  int code_size = Assembler::kCallSequenceLength + guard_bytes;

  // Create a code patcher.
  CodePatcher patcher(pc_, code_size);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_codesize;
  patcher.masm()->bind(&check_codesize);
#endif

  // Patch the code.
  patcher.masm()->movq(r10, target, RelocInfo::NONE64);
  patcher.masm()->call(r10);

  // Check that the size of the code generated is as expected.
  ASSERT_EQ(Assembler::kCallSequenceLength,
            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));

  // Add the requested number of int3 instructions after the call.
  for (int i = 0; i < guard_bytes; i++) {
    patcher.masm()->int3();
  }
}
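
// For reference, the patched sequence above encodes as
//   49 BA <imm64>   movq r10, <target>   ; 10 bytes
//   41 FF D2        call r10             ;  3 bytes
// which is why Assembler::kCallSequenceLength is expected to be 13 bytes,
// followed by `guard_bytes` int3 (0xCC) bytes.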


void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  // Patch the code at the current address with the supplied instructions.
  for (int i = 0; i < instruction_count; i++) {
    *(pc_ + i) = *(instructions + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count);
}


// -----------------------------------------------------------------------------
// Register constants.

const int
    Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
  // rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
  0, 3, 2, 1, 7, 8, 9, 11, 14, 15
};

const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
  0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
};
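
// Reading the two tables together (a worked example, not new behavior):
// allocation index 4 maps to register code 7 (rdi) via
// kRegisterCodeByAllocationIndex, and kAllocationIndexByRegisterCode[7] maps
// rdi back to allocation index 4. Registers with a -1 entry (rsp, rbp, rsi,
// r10, r12, r13) are not handed out by the register allocator.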


// -----------------------------------------------------------------------------
// Implementation of Operand

Operand::Operand(Register base, int32_t disp) : rex_(0) {
  len_ = 1;
  if (base.is(rsp) || base.is(r12)) {
    // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
    set_sib(times_1, rsp, base);
  }

  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    set_modrm(0, base);
  } else if (is_int8(disp)) {
    set_modrm(1, base);
    set_disp8(disp);
  } else {
    set_modrm(2, base);
    set_disp32(disp);
  }
}
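
// Worked encoding examples for this constructor (bytes shown without any REX
// prefix, which is emitted separately):
//   Operand(rbx, 0x10)  ->  ModR/M 0x43, disp8 0x10   ; [rbx+0x10]
//   Operand(rsp, 0)     ->  ModR/M 0x04, SIB 0x24     ; [rsp]
//   Operand(rbp, 0)     ->  ModR/M 0x45, disp8 0x00   ; [rbp+0]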


Operand::Operand(Register base,
                 Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_sib(scale, index, base);
  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
    // possibly set by set_sib.
    set_modrm(0, rsp);
  } else if (is_int8(disp)) {
    set_modrm(1, rsp);
    set_disp8(disp);
  } else {
    set_modrm(2, rsp);
    set_disp32(disp);
  }
}


Operand::Operand(Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_modrm(0, rsp);
  set_sib(scale, index, rbp);
  set_disp32(disp);
}


Operand::Operand(const Operand& operand, int32_t offset) {
  ASSERT(operand.len_ >= 1);
  // Operand encodes REX ModR/M [SIB] [Disp].
  byte modrm = operand.buf_[0];
  ASSERT(modrm < 0xC0);  // Disallow mode 3 (register target).
  bool has_sib = ((modrm & 0x07) == 0x04);
  byte mode = modrm & 0xC0;
  int disp_offset = has_sib ? 2 : 1;
  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
  // displacement.
  bool is_baseless = (mode == 0) && (base_reg == 0x05);  // No base or RIP base.
  int32_t disp_value = 0;
  if (mode == 0x80 || is_baseless) {
    // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
    disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
  } else if (mode == 0x40) {
    // Mode 1: Byte displacement.
    disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
  }

  // Write new operand with same registers, but with modified displacement.
  ASSERT(offset >= 0 ? disp_value + offset > disp_value
                     : disp_value + offset < disp_value);  // No overflow.
  disp_value += offset;
  rex_ = operand.rex_;
  if (!is_int8(disp_value) || is_baseless) {
    // Need 32 bits of displacement: mode 2, or mode 0 with rbp/r13 as base.
    buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
    len_ = disp_offset + 4;
    Memory::int32_at(&buf_[disp_offset]) = disp_value;
  } else if (disp_value != 0 || (base_reg == 0x05)) {
    // Need 8 bits of displacement.
    buf_[0] = (modrm & 0x3f) | 0x40;  // Mode 1.
    len_ = disp_offset + 1;
    buf_[disp_offset] = static_cast<byte>(disp_value);
  } else {
    // Need no displacement.
    buf_[0] = (modrm & 0x3f);  // Mode 0.
    len_ = disp_offset;
  }
  if (has_sib) {
    buf_[1] = operand.buf_[1];
  }
}
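
// Example of the rewrite above (a sketch, no new behavior): starting from
// Operand(rbx, 0x10), i.e. bytes 43 10, adding an offset of 0x100 no longer
// fits in 8 bits, so the operand is re-encoded in mode 2 as 83 10 01 00 00,
// i.e. [rbx+0x110] with a 32-bit displacement.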


bool Operand::AddressUsesRegister(Register reg) const {
  int code = reg.code();
  ASSERT((buf_[0] & 0xC0) != 0xC0);  // Always a memory operand.
  // Start with only low three bits of base register. Initial decoding doesn't
  // distinguish on the REX.B bit.
  int base_code = buf_[0] & 0x07;
  if (base_code == rsp.code()) {
    // SIB byte present in buf_[1].
    // Check the index register from the SIB byte + REX.X prefix.
    int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
    // Index code (including REX.X) of 0x04 (rsp) means no index register.
    if (index_code != rsp.code() && index_code == code) return true;
    // Add REX.B to get the full base register code.
    base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
    // A base register of 0x05 (rbp) with mod = 0 means no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    return code == base_code;
  } else {
    // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
    // no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    base_code |= ((rex_ & 0x01) << 3);
    return code == base_code;
  }
}
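
// Example (illustrative only): for Operand(rbx, rcx, times_4, 0),
// AddressUsesRegister returns true for rbx (SIB base) and rcx (SIB index),
// and false for rsp, since an index field of rsp means "no index".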


// -----------------------------------------------------------------------------
// Implementation of Assembler.

#ifdef GENERATED_CODE_COVERAGE
static void InitCoverageLog();
#endif

Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
    : AssemblerBase(isolate, buffer, buffer_size),
      code_targets_(100),
      positions_recorder_(this) {
  // Clear the buffer in debug mode unless it was provided by the
  // caller in which case we can't be sure it's okay to overwrite
  // existing code in it.
#ifdef DEBUG
  if (own_buffer_) {
    memset(buffer_, 0xCC, buffer_size_);  // int3
  }
#endif

  reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);


#ifdef GENERATED_CODE_COVERAGE
  InitCoverageLog();
#endif
}


void Assembler::GetCode(CodeDesc* desc) {
  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Set up code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
  desc->reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
  desc->origin = this;
}


void Assembler::Align(int m) {
  ASSERT(IsPowerOf2(m));
  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
  Nop(delta);
}


void Assembler::CodeTargetAlign() {
  Align(16);  // Preferred alignment of jump targets on x64.
}


bool Assembler::IsNop(Address addr) {
  Address a = addr;
  while (*a == 0x66) a++;
  if (*a == 0x90) return true;
  if (a[0] == 0xf && a[1] == 0x1f) return true;
  return false;
}


void Assembler::bind_to(Label* L, int pos) {
  ASSERT(!L->is_bound());  // Label may only be bound once.
  ASSERT(0 <= pos && pos <= pc_offset());  // Position must be valid.
  if (L->is_linked()) {
    int current = L->pos();
    int next = long_at(current);
    while (next != current) {
      // Relative address, relative to point after address.
      int imm32 = pos - (current + sizeof(int32_t));
      long_at_put(current, imm32);
      current = next;
      next = long_at(next);
    }
    // Fix up last fixup on linked list.
    int last_imm32 = pos - (current + sizeof(int32_t));
    long_at_put(current, last_imm32);
  }
  while (L->is_near_linked()) {
    int fixup_pos = L->near_link_pos();
    int offset_to_next =
        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
    ASSERT(offset_to_next <= 0);
    int disp = pos - (fixup_pos + sizeof(int8_t));
    CHECK(is_int8(disp));
    set_byte_at(fixup_pos, disp);
    if (offset_to_next < 0) {
      L->link_to(fixup_pos + offset_to_next, Label::kNear);
    } else {
      L->UnuseNear();
    }
  }
  L->bind_to(pos);
}
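
// A sketch of the fixup chain handled above (no new behavior): each unbound
// far jump/call to L stores the position of the previous link in its 32-bit
// displacement field, and L->pos() points at the newest link; the oldest
// link stores its own position, which terminates the walk. bind_to() then
// walks the chain from newest to oldest, rewriting every displacement to
// `pos - (fixup_pos + 4)`, i.e. relative to the byte after the displacement.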


void Assembler::bind(Label* L) {
  bind_to(L, pc_offset());
}


void Assembler::GrowBuffer() {
  ASSERT(buffer_overflow());
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size.
  CodeDesc desc;  // the new buffer
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else {
    desc.buffer_size = 2*buffer_size_;
  }
  // Some internal data structures overflow for very large buffers,
  // so we must ensure that kMaximalBufferSize is not too large.
  if ((desc.buffer_size > kMaximalBufferSize) ||
      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
  }

  // Set up new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);
  desc.instr_size = pc_offset();
  desc.reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));

  // Clear the buffer in debug mode. Use 'int3' instructions to make
  // sure we run into problems if we ever execute uninitialized code.
#ifdef DEBUG
  memset(desc.buffer, 0xCC, desc.buffer_size);
#endif

  // Copy the data.
  intptr_t pc_delta = desc.buffer - buffer_;
  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
      (buffer_ + buffer_size_);
  OS::MemMove(desc.buffer, buffer_, desc.instr_size);
  OS::MemMove(rc_delta + reloc_info_writer.pos(),
              reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  if (isolate() != NULL &&
      isolate()->assembler_spare_buffer() == NULL &&
      buffer_size_ == kMinimalBufferSize) {
    isolate()->set_assembler_spare_buffer(buffer_);
  } else {
    DeleteArray(buffer_);
  }
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
      if (*p != 0) {  // 0 means uninitialized.
        *p += pc_delta;
      }
    }
  }

  ASSERT(!buffer_overflow());
}


void Assembler::emit_operand(int code, const Operand& adr) {
  ASSERT(is_uint3(code));
  const unsigned length = adr.len_;
  ASSERT(length > 0);

  // Emit updated ModR/M byte containing the given register.
  ASSERT((adr.buf_[0] & 0x38) == 0);
  pc_[0] = adr.buf_[0] | code << 3;

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
  pc_ += length;
}
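
// Worked example (a sketch): combined with an opcode byte, this produces
// e.g. `call [rbx+0x10]`:
//   emit(0xFF);
//   emit_operand(0x2, Operand(rbx, 0x10));   // -> FF 53 10
// The /2 opcode extension is OR-ed into the reg field of the ModR/M byte
// 0x43, giving 0x53.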


// Assembler Instruction implementations.

void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  emit_rex_64(reg, op);
  emit(opcode);
  emit_operand(reg, op);
}


void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit_rex_64(rm_reg, reg);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit_rex_64(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}
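
// Worked example for the register-register form (illustrative only):
// arithmetic_op(0x03, rax, rbx) emits 48 03 C3, i.e. `add rax, rbx`.
// With rm_reg = rsp (low bits 4) the operands are swapped and the opcode
// direction bit is flipped, so arithmetic_op(0x03, rax, rsp) emits
// 48 01 E0, which encodes the same `add rax, rsp` operation.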


void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit(0x66);
    emit_optional_rex_32(rm_reg, reg);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit(0x66);
    emit_optional_rex_32(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}


void Assembler::arithmetic_op_16(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit_optional_rex_32(rm_reg, reg);
    emit(opcode ^ 0x02);  // E.g. 0x03 -> 0x01 for ADD.
    emit_modrm(rm_reg, reg);
  } else {
    emit_optional_rex_32(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}


void Assembler::arithmetic_op_32(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


void Assembler::immediate_arithmetic_op(byte subcode,
                                        Register dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}

void Assembler::immediate_arithmetic_op(byte subcode,
                                        const Operand& dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitw(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitw(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitw(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          const Operand& dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_operand(subcode, dst);
  emit(src.value_);
}


void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          Register dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Use 64-bit mode byte registers.
    emit_rex_64(dst);
  }
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_modrm(subcode, dst);
  emit(src.value_);
}


void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint6(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_rex_64(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_rex_64(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}
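
// Worked example (illustrative only): shift(rax, Immediate(3), 0x4), i.e.
// `shl rax, 3`, emits 48 C1 E0 03; a shift amount of 1 instead uses the
// shorter 0xD1 form, 48 D1 E0.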


void Assembler::shift(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


void Assembler::shift_32(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_optional_rex_32(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_optional_rex_32(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}


void Assembler::bt(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA3);
  emit_operand(src, dst);
}


void Assembler::bts(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAB);
  emit_operand(src, dst);
}


void Assembler::call(Label* L) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (L->is_linked()) {
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::call(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_runtime_entry(entry, rmode);
}


void Assembler::call(Handle<Code> target,
                     RelocInfo::Mode rmode,
                     TypeFeedbackId ast_id) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_code_target(target, rmode, ast_id);
}


void Assembler::call(Register adr) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 r64.
  emit_optional_rex_32(adr);
  emit(0xFF);
  emit_modrm(0x2, adr);
}


void Assembler::call(const Operand& op) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 m64.
  emit_optional_rex_32(op);
  emit(0xFF);
  emit_operand(0x2, op);
}


// Calls directly to the given address using a relative offset.
// Should only ever be used in Code objects for calls within the
// same Code object. Should not be used when generating new code (use labels),
// but only when patching existing code.
void Assembler::call(Address target) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  Address source = pc_ + 4;
  intptr_t displacement = target - source;
  ASSERT(is_int32(displacement));
  emitl(static_cast<int32_t>(displacement));
}
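
// Example of the displacement computed above (a sketch): if this call
// instruction starts at address A, the five emitted bytes are E8 <disp32>
// with disp32 = target - (A + 5), because `source` points just past the
// displacement field.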


void Assembler::clc() {
  EnsureSpace ensure_space(this);
  emit(0xF8);
}


void Assembler::cld() {
  EnsureSpace ensure_space(this);
  emit(0xFC);
}


void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  emit(0x99);
}


void Assembler::cmovq(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  // No need to check CpuInfo for CMOV support, it's a required part of the
  // 64-bit architecture.
  ASSERT(cc >= 0);  // Use mov for unconditional moves.
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


void Assembler::cmovl(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


void Assembler::cmpb_al(Immediate imm8) {
  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
  EnsureSpace ensure_space(this);
  emit(0x3c);
  emit(imm8.value_);
}


void Assembler::cpuid() {
  ASSERT(IsEnabled(CPUID));
  EnsureSpace ensure_space(this);
  emit(0x0F);
  emit(0xA2);
}


void Assembler::cqo() {
  EnsureSpace ensure_space(this);
  emit_rex_64();
  emit(0x99);
}


void Assembler::decq(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


void Assembler::decq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


void Assembler::decl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


void Assembler::decl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


void Assembler::decb(Register dst) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
    emit_rex_32(dst);
  }
  emit(0xFE);
  emit_modrm(0x1, dst);
}


void Assembler::decb(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFE);
  emit_operand(1, dst);
}


void Assembler::enter(Immediate size) {
  EnsureSpace ensure_space(this);
  emit(0xC8);
  emitw(size.value_);  // 16 bit operand, always.
  emit(0);
}


void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  emit(0xF4);
}


void Assembler::idivq(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


void Assembler::idivl(Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


void Assembler::imul(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x5, src);
}


void Assembler::imul(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


void Assembler::imul(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}


void Assembler::imul(Register dst, Register src, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}


void Assembler::imull(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


void Assembler::imull(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}


void Assembler::imull(Register dst, Register src, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}


void Assembler::incq(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x0, dst);
}


void Assembler::incq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


void Assembler::incl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


void Assembler::incl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_modrm(0, dst);
}


void Assembler::int3() {
  EnsureSpace ensure_space(this);
  emit(0xCC);
}


void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
  if (cc == always) {
    jmp(L);
    return;
  } else if (cc == never) {
    return;
  }
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size  = 6;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    // Determine whether we can use 1-byte offsets for backwards branches,
    // which have a max range of 128 bytes.

    // We also need to check predictable_code_size() flag here, because on x64,
    // when the full code generator recompiles code for debugging, some places
    // need to be padded out to a certain size. The debugger is keeping track of
    // how often it did this so that it can adjust return addresses on the
    // stack, but if the size of jump instructions can also change, that's not
    // enough and the calculated offsets would be incorrect.
    if (is_int8(offs - short_size) && !predictable_code_size()) {
      // 0111 tttn #8-bit disp.
      emit(0x70 | cc);
      emit((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp.
      emit(0x0F);
      emit(0x80 | cc);
      emitl(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    // 0111 tttn #8-bit disp
    emit(0x70 | cc);
    byte disp = 0x00;
    if (L->is_near_linked()) {
      int offset = L->near_link_pos() - pc_offset();
      ASSERT(is_int8(offset));
      disp = static_cast<byte>(offset & 0xFF);
    }
    L->link_to(pc_offset(), Label::kNear);
    emit(disp);
  } else if (L->is_linked()) {
    // 0000 1111 1000 tttn #32-bit disp.
    emit(0x0F);
    emit(0x80 | cc);
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    emit(0x0F);
    emit(0x80 | cc);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  emit(0x0F);
  emit(0x80 | cc);
  emit_runtime_entry(entry, rmode);
}


void Assembler::j(Condition cc,
                  Handle<Code> target,
                  RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  // 0000 1111 1000 tttn #32-bit disp.
  emit(0x0F);
  emit(0x80 | cc);
  emit_code_target(target, rmode);
}


void Assembler::jmp(Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  const int short_size = sizeof(int8_t);
  const int long_size = sizeof(int32_t);
  if (L->is_bound()) {
    int offs = L->pos() - pc_offset() - 1;
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size) && !predictable_code_size()) {
      // 1110 1011 #8-bit disp.
      emit(0xEB);
      emit((offs - short_size) & 0xFF);
    } else {
      // 1110 1001 #32-bit disp.
      emit(0xE9);
      emitl(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    emit(0xEB);
    byte disp = 0x00;
    if (L->is_near_linked()) {
      int offset = L->near_link_pos() - pc_offset();
      ASSERT(is_int8(offset));
      disp = static_cast<byte>(offset & 0xFF);
    }
    L->link_to(pc_offset(), Label::kNear);
    emit(disp);
  } else if (L->is_linked()) {
    // 1110 1001 #32-bit disp.
    emit(0xE9);
    emitl(L->pos());
    L->link_to(pc_offset() - long_size);
  } else {
    // 1110 1001 #32-bit disp.
    ASSERT(L->is_unused());
    emit(0xE9);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  // 1110 1001 #32-bit disp.
  emit(0xE9);
  emit_code_target(target, rmode);
}


void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  EnsureSpace ensure_space(this);
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  emit(0xE9);
  emit_runtime_entry(entry, rmode);
}


void Assembler::jmp(Register target) {
  EnsureSpace ensure_space(this);
  // Opcode FF/4 r64.
  emit_optional_rex_32(target);
  emit(0xFF);
  emit_modrm(0x4, target);
}


void Assembler::jmp(const Operand& src) {
  EnsureSpace ensure_space(this);
  // Opcode FF/4 m64.
  emit_optional_rex_32(src);
  emit(0xFF);
  emit_operand(0x4, src);
}


void Assembler::lea(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x8D);
  emit_operand(dst, src);
}


void Assembler::leal(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x8D);
  emit_operand(dst, src);
}


void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  emit(0x48);  // REX.W
  emit(0xA1);
  emitp(value, mode);
}


void Assembler::load_rax(ExternalReference ref) {
  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}


void Assembler::leave() {
  EnsureSpace ensure_space(this);
  emit(0xC9);
}


void Assembler::movb(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
    emit_rex_32(dst, src);
  } else {
    emit_optional_rex_32(dst, src);
  }
  emit(0x8A);
  emit_operand(dst, src);
}


void Assembler::movb(Register dst, Immediate imm) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    emit_rex_32(dst);
  }
  emit(0xB0 + dst.low_bits());
  emit(imm.value_);
}


void Assembler::movb(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  if (!src.is_byte_register()) {
    emit_rex_32(src, dst);
  } else {
    emit_optional_rex_32(src, dst);
  }
  emit(0x88);
  emit_operand(src, dst);
}


void Assembler::movw(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


void Assembler::movl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}


void Assembler::movl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_optional_rex_32(src, dst);
    emit(0x89);
    emit_modrm(src, dst);
  } else {
    emit_optional_rex_32(dst, src);
    emit(0x8B);
    emit_modrm(dst, src);
  }
}


void Assembler::movl(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


void Assembler::movl(const Operand& dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0x0, dst);
  emit(value);
}


void Assembler::movl(Register dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xB8 + dst.low_bits());
  emit(value);
}


void Assembler::movq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}


void Assembler::movq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_64(src, dst);
    emit(0x89);
    emit_modrm(src, dst);
  } else {
    emit_rex_64(dst, src);
    emit(0x8B);
    emit_modrm(dst, src);
  }
}


void Assembler::movq(Register dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xC7);
  emit_modrm(0x0, dst);
  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
}


void Assembler::movq(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
  // This method must not be used with heap object references. The stored
  // address is not GC safe. Use the handle version instead.
  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitp(value, rmode);
}


void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
  // Non-relocatable values might not need a 64-bit representation.
  if (RelocInfo::IsNone(rmode)) {
    if (is_uint32(value)) {
      movl(dst, Immediate(static_cast<int32_t>(value)));
      return;
    } else if (is_int32(value)) {
      movq(dst, Immediate(static_cast<int32_t>(value)));
      return;
    }
    // Value cannot be represented by 32 bits, so do a full 64 bit immediate
    // value.
  }
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(value, rmode);
}


void Assembler::movq(Register dst, ExternalReference ref) {
  int64_t value = reinterpret_cast<int64_t>(ref.address());
  movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
}


void Assembler::movq(const Operand& dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xC7);
  emit_operand(0, dst);
  emit(value);
}


// Loads the ip-relative location of the src label into the target location
// (as a 32-bit offset sign extended to 64-bit).
void Assembler::movl(const Operand& dst, Label* src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0, dst);
  if (src->is_bound()) {
    int offset = src->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (src->is_linked()) {
    emitl(src->pos());
    src->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(src->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    src->link_to(current);
  }
}


void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
  AllowDeferredHandleDereference using_raw_address;
  // If there is no relocation info, emit the value of the handle efficiently
  // (possibly using less than 8 bytes for the value).
  if (RelocInfo::IsNone(mode)) {
    // There is no possible reason to store a heap pointer without relocation
    // info, so it must be a smi.
    ASSERT(value->IsSmi());
    movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE64);
  } else {
    EnsureSpace ensure_space(this);
    ASSERT(value->IsHeapObject());
    ASSERT(!HEAP->InNewSpace(*value));
    emit_rex_64(dst);
    emit(0xB8 | dst.low_bits());
    emitp(value.location(), mode);
  }
}


void Assembler::movsxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBE);
  emit_operand(dst, src);
}


void Assembler::movsxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBF);
  emit_operand(dst, src);
}


void Assembler::movsxlq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_modrm(dst, src);
}


void Assembler::movsxlq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_operand(dst, src);
}


void Assembler::movzxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  // 32 bit operations zero the top 32 bits of 64 bit registers.  Therefore
  // there is no need to make this a 64 bit operation.
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxbl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}


void Assembler::movzxwl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}


void Assembler::movzxwl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_modrm(dst, src);
}


void Assembler::repmovsb() {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit(0xA4);
}


void Assembler::repmovsw() {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override.
  emit(0xF3);
  emit(0xA4);
}


void Assembler::repmovsl() {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit(0xA5);
}


void Assembler::repmovsq() {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_rex_64();
  emit(0xA5);
}


void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x4, src);
}


void Assembler::neg(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xF7);
  emit_modrm(0x3, dst);
}


void Assembler::negl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xF7);
  emit_modrm(0x3, dst);
}


void Assembler::neg(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xF7);
  emit_operand(3, dst);
}


void Assembler::nop() {
  EnsureSpace ensure_space(this);
  emit(0x90);
}


void Assembler::not_(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xF7);
  emit_modrm(0x2, dst);
}


void Assembler::not_(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xF7);
  emit_operand(2, dst);
}


void Assembler::notl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xF7);
  emit_modrm(0x2, dst);
}


void Assembler::Nop(int n) {
  // The recommended multi-byte sequences of NOP instructions from the Intel 64
  // and IA-32 Architectures Software Developer's Manual.
  //
  // Length   Assembly                                Byte Sequence
  // 2 bytes  66 NOP                                  66 90H
  // 3 bytes  NOP DWORD ptr [EAX]                     0F 1F 00H
  // 4 bytes  NOP DWORD ptr [EAX + 00H]               0F 1F 40 00H
  // 5 bytes  NOP DWORD ptr [EAX + EAX*1 + 00H]       0F 1F 44 00 00H
  // 6 bytes  66 NOP DWORD ptr [EAX + EAX*1 + 00H]    66 0F 1F 44 00 00H
  // 7 bytes  NOP DWORD ptr [EAX + 00000000H]         0F 1F 80 00 00 00 00H
  // 8 bytes  NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
  // 9 bytes  66 NOP DWORD ptr [EAX + EAX*1 +         66 0F 1F 84 00 00 00 00
  //          00000000H]                              00H

  EnsureSpace ensure_space(this);
  while (n > 0) {
    switch (n) {
      case 2:
        emit(0x66);
      case 1:
        emit(0x90);
        return;
      case 3:
        emit(0x0f);
        emit(0x1f);
        emit(0x00);
        return;
      case 4:
        emit(0x0f);
        emit(0x1f);
        emit(0x40);
        emit(0x00);
        return;
      case 6:
        emit(0x66);
      case 5:
        emit(0x0f);
        emit(0x1f);
        emit(0x44);
        emit(0x00);
        emit(0x00);
        return;
      case 7:
        emit(0x0f);
        emit(0x1f);
        emit(0x80);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        return;
      default:
      case 11:
        emit(0x66);
        n--;
      case 10:
        emit(0x66);
        n--;
      case 9:
        emit(0x66);
        n--;
      case 8:
        emit(0x0f);
        emit(0x1f);
        emit(0x84);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        n -= 8;
    }
  }
}
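
// Examples of the expansion above (illustrative only):
//   Nop(5)  emits 0F 1F 44 00 00
//   Nop(12) emits 66 66 66 0F 1F 84 00 00 00 00 00 00 90
// (three 0x66 prefixes, the 8-byte NOP, then a final single-byte 0x90).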
1859
1860
1861void Assembler::pop(Register dst) {
1862  EnsureSpace ensure_space(this);
1863  emit_optional_rex_32(dst);
1864  emit(0x58 | dst.low_bits());
1865}
1866
1867
1868void Assembler::pop(const Operand& dst) {
1869  EnsureSpace ensure_space(this);
1870  emit_optional_rex_32(dst);
1871  emit(0x8F);
1872  emit_operand(0, dst);
1873}
1874
1875
1876void Assembler::popfq() {
1877  EnsureSpace ensure_space(this);
1878  emit(0x9D);
1879}
1880
1881
1882void Assembler::push(Register src) {
1883  EnsureSpace ensure_space(this);
1884  emit_optional_rex_32(src);
1885  emit(0x50 | src.low_bits());
1886}
1887
1888
1889void Assembler::push(const Operand& src) {
1890  EnsureSpace ensure_space(this);
1891  emit_optional_rex_32(src);
1892  emit(0xFF);
1893  emit_operand(6, src);
1894}
1895
1896
1897void Assembler::push(Immediate value) {
1898  EnsureSpace ensure_space(this);
1899  if (is_int8(value.value_)) {
1900    emit(0x6A);
1901    emit(value.value_);  // Emit low byte of value.
1902  } else {
1903    emit(0x68);
1904    emitl(value.value_);
1905  }
1906}
1907
1908
1909void Assembler::push_imm32(int32_t imm32) {
1910  EnsureSpace ensure_space(this);
1911  emit(0x68);
1912  emitl(imm32);
1913}
1914
1915
1916void Assembler::pushfq() {
1917  EnsureSpace ensure_space(this);
1918  emit(0x9C);
1919}
1920
1921
1922void Assembler::rdtsc() {
1923  EnsureSpace ensure_space(this);
1924  emit(0x0F);
1925  emit(0x31);
1926}
1927
1928
1929void Assembler::ret(int imm16) {
1930  EnsureSpace ensure_space(this);
1931  ASSERT(is_uint16(imm16));
1932  if (imm16 == 0) {
1933    emit(0xC3);
1934  } else {
1935    emit(0xC2);
1936    emit(imm16 & 0xFF);
1937    emit((imm16 >> 8) & 0xFF);
1938  }
1939}
1940
1941
1942void Assembler::setcc(Condition cc, Register reg) {
1943  if (cc > last_condition) {
1944    movb(reg, Immediate(cc == always ? 1 : 0));
1945    return;
1946  }
1947  EnsureSpace ensure_space(this);
1948  ASSERT(is_uint4(cc));
1949  if (!reg.is_byte_register()) {  // Use x64 byte registers, where different.
1950    emit_rex_32(reg);
1951  }
1952  emit(0x0F);
1953  emit(0x90 | cc);
1954  emit_modrm(0x0, reg);
1955}
1956
1957
1958void Assembler::shld(Register dst, Register src) {
1959  EnsureSpace ensure_space(this);
1960  emit_rex_64(src, dst);
1961  emit(0x0F);
1962  emit(0xA5);
1963  emit_modrm(src, dst);
1964}
1965
1966
1967void Assembler::shrd(Register dst, Register src) {
1968  EnsureSpace ensure_space(this);
1969  emit_rex_64(src, dst);
1970  emit(0x0F);
1971  emit(0xAD);
1972  emit_modrm(src, dst);
1973}
1974
1975
void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
    Register other = src.is(rax) ? dst : src;
    emit_rex_64(other);
    emit(0x90 | other.low_bits());
  } else if (dst.low_bits() == 4) {
    emit_rex_64(dst, src);
    emit(0x87);
    emit_modrm(dst, src);
  } else {
    emit_rex_64(src, dst);
    emit(0x87);
    emit_modrm(src, dst);
  }
}


void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  emit(0x48);  // REX.W
  emit(0xA3);
  emitp(dst, mode);
}


void Assembler::store_rax(ExternalReference ref) {
  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}


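// The test instructions below AND their operands, set the flags and discard
// the result.  The shorter 0xA8/0xA9 immediate forms are used when the
// register operand is al/eax/rax.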
void Assembler::testb(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_32(src, dst);
    emit(0x84);
    emit_modrm(src, dst);
  } else {
    if (!dst.is_byte_register() || !src.is_byte_register()) {
      // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
      emit_rex_32(dst, src);
    }
    emit(0x84);
    emit_modrm(dst, src);
  }
}


void Assembler::testb(Register reg, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit(0xA8);
    emit(mask.value_);  // Low byte emitted.
  } else {
    if (!reg.is_byte_register()) {
      // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
      emit_rex_32(reg);
    }
    emit(0xF6);
    emit_modrm(0x0, reg);
    emit(mask.value_);  // Low byte emitted.
  }
}


void Assembler::testb(const Operand& op, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(rax, op);
  emit(0xF6);
  emit_operand(rax, op);  // Operation code 0
  emit(mask.value_);  // Low byte emitted.
}


void Assembler::testb(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  if (!reg.is_byte_register()) {
    // Register is not one of al, bl, cl, dl.  Its encoding needs REX.
    emit_rex_32(reg, op);
  } else {
    emit_optional_rex_32(reg, op);
  }
  emit(0x84);
  emit_operand(reg, op);
}


void Assembler::testl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_optional_rex_32(src, dst);
    emit(0x85);
    emit_modrm(src, dst);
  } else {
    emit_optional_rex_32(dst, src);
    emit(0x85);
    emit_modrm(dst, src);
  }
}


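// Illustrative example of the dispatch below: testl(rax, Immediate(0xFF))
// is emitted as A8 FF (test al, 0xff), and testl(rcx, Immediate(0xFF)) as
// F6 C1 FF; this is equivalent because the upper bits of the mask are zero.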
void Assembler::testl(Register reg, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(reg, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit(0xA9);
    emit(mask);
  } else {
    emit_optional_rex_32(rax, reg);
    emit(0xF7);
    emit_modrm(0x0, reg);
    emit(mask);
  }
}


void Assembler::testl(const Operand& op, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(op, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(rax, op);
  emit(0xF7);
  emit_operand(rax, op);  // Operation code 0
  emit(mask);
}


void Assembler::testq(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  emit_rex_64(reg, op);
  emit(0x85);
  emit_operand(reg, op);
}


void Assembler::testq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_64(src, dst);
    emit(0x85);
    emit_modrm(src, dst);
  } else {
    emit_rex_64(dst, src);
    emit(0x85);
    emit_modrm(dst, src);
  }
}


void Assembler::testq(Register dst, Immediate mask) {
  EnsureSpace ensure_space(this);
  if (dst.is(rax)) {
    emit_rex_64();
    emit(0xA9);
    emit(mask);
  } else {
    emit_rex_64(dst);
    emit(0xF7);
    emit_modrm(0, dst);
    emit(mask);
  }
}


// FPU instructions.


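// These are x87 instructions; they operate on the eight-entry floating
// point register stack st(0)-st(7).  The register forms are encoded via
// emit_farith(b1, b2, i), which emits the two bytes b1 and b2 + i; for
// example, fld(2) emits D9 C2, i.e. fld st(2).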
void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}


void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE8);
}


void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEE);
}


void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEB);
}


void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xED);
}


void Assembler::fld_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(0, adr);
}


void Assembler::fld_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(0, adr);
}


void Assembler::fstp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(3, adr);
}


void Assembler::fstp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(3, adr);
}


void Assembler::fstp(int index) {
  ASSERT(is_uint3(index));
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, index);
}


void Assembler::fild_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(0, adr);
}


void Assembler::fild_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(5, adr);
}


void Assembler::fistp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(3, adr);
}


void Assembler::fisttp_s(const Operand& adr) {
  ASSERT(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(1, adr);
}


void Assembler::fisttp_d(const Operand& adr) {
  ASSERT(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(1, adr);
}


void Assembler::fist_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(2, adr);
}


void Assembler::fistp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(7, adr);
}


void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE1);
}


void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE0);
}


void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFF);
}


void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFE);
}


void Assembler::fptan() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF2);
}


void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF1);
}


void Assembler::f2xm1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF0);
}


void Assembler::fscale() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFD);
}


void Assembler::fninit() {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE3);
}


void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}


void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}


void Assembler::fisub_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDA);
  emit_operand(4, adr);
}


void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}


void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}


void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}


void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}


void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}


void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}


void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}


void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF8);
}


void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF5);
}


void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}


void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF7);
}


void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}


void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE4);
}


void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}


void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  emit(0xDA);
  emit(0xE9);
}


void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE8 + i);
}


void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE9);
}


void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  emit(0xDE);
  emit(0xD9);
}


void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE0);
}


void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  emit(0x9B);
}


void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFC);
}


void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE2);
}


void Assembler::sahf() {
  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs support sahf
  // in 64-bit mode; check the CPUID LAHF/SAHF feature bit.
  EnsureSpace ensure_space(this);
  emit(0x9E);
}


void Assembler::emit_farith(int b1, int b2, int i) {
  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  ASSERT(is_uint3(i));  // illegal stack offset
  emit(b1);
  emit(b2 + i);
}


// SSE 2 operations.

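// The SSE/SSE2 helpers below follow a common encoding pattern: a mandatory
// prefix where the instruction requires one (66 for packed and integer
// forms, F2 for scalar double, F3 for scalar single), then the REX prefix,
// then the 0F-escaped opcode and a ModRM byte.  The REX prefix has to
// immediately precede the opcode, which is why it is emitted after the
// 66/F2/F3 byte.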
void Assembler::movd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x6E);
  emit_sse_operand(dst, src);
}


void Assembler::movd(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(src, dst);
  emit(0x0F);
  emit(0x7E);
  emit_sse_operand(src, dst);
}


void Assembler::movq(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x6E);
  emit_sse_operand(dst, src);
}


void Assembler::movq(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0x7E);
  emit_sse_operand(src, dst);
}


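// The two encodings below (F3 0F 7E and 66 0F D6) are equivalent here: both
// copy the low 64 bits between XMM registers and zero the upper half of the
// destination.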
void Assembler::movq(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  if (dst.low_bits() == 4) {
    // Avoid unnecessary SIB byte.
    emit(0xF3);
    emit_optional_rex_32(dst, src);
    emit(0x0F);
    emit(0x7E);
    emit_sse_operand(dst, src);
  } else {
    emit(0x66);
    emit_optional_rex_32(src, dst);
    emit(0x0F);
    emit(0xD6);
    emit_sse_operand(src, dst);
  }
}


void Assembler::movdqa(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0x7F);
  emit_sse_operand(src, dst);
}


void Assembler::movdqa(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x6F);
  emit_sse_operand(dst, src);
}


void Assembler::movdqu(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0x7F);
  emit_sse_operand(src, dst);
}


void Assembler::movdqu(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x6F);
  emit_sse_operand(dst, src);
}


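// extractps (SSE4.1) stores the 32-bit lane of 'src' selected by the low two
// bits of imm8 into the destination register; the encoding is 66 0F 3A 17.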
void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
  ASSERT(CpuFeatures::IsSupported(SSE4_1));
  ASSERT(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x3A);
  emit(0x17);
  emit_sse_operand(dst, src);
  emit(imm8);
}


void Assembler::movsd(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(src, dst);
  emit(0x0F);
  emit(0x11);  // store
  emit_sse_operand(src, dst);
}


void Assembler::movsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movsd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movaps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    // Try to avoid an unnecessary SIB byte.
    emit_optional_rex_32(src, dst);
    emit(0x0F);
    emit(0x29);
    emit_sse_operand(src, dst);
  } else {
    emit_optional_rex_32(dst, src);
    emit(0x0F);
    emit(0x28);
    emit_sse_operand(dst, src);
  }
}


void Assembler::movapd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    // Try to avoid an unnecessary SIB byte.
    emit(0x66);
    emit_optional_rex_32(src, dst);
    emit(0x0F);
    emit(0x29);
    emit_sse_operand(src, dst);
  } else {
    emit(0x66);
    emit_optional_rex_32(dst, src);
    emit(0x0F);
    emit(0x28);
    emit_sse_operand(dst, src);
  }
}


void Assembler::movss(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);  // single
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movss(const Operand& src, XMMRegister dst) {
  EnsureSpace ensure_space(this);
  emit(0xF3);  // single
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x11);  // store
  emit_sse_operand(dst, src);
}


void Assembler::cvttss2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttss2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


void Assembler::cvttsd2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttsd2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


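// In the cvt* helpers in this file, 'l' and 'q' in the method name refer to
// the width of the general-purpose operand (32-bit vs. 64-bit); the 'q'
// forms emit REX.W, the 'l' forms do not.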
void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtsd2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2D);
  emit_sse_operand(dst, src);
}


void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x2D);
  emit_sse_operand(dst, src);
}


void Assembler::addsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x58);
  emit_sse_operand(dst, src);
}


void Assembler::addsd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x58);
  emit_sse_operand(dst, src);
}


void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x59);
  emit_sse_operand(dst, src);
}


void Assembler::mulsd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x59);
  emit_sse_operand(dst, src);
}


void Assembler::subsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5C);
  emit_sse_operand(dst, src);
}


void Assembler::divsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5E);
  emit_sse_operand(dst, src);
}


void Assembler::andpd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x54);
  emit_sse_operand(dst, src);
}


void Assembler::orpd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x56);
  emit_sse_operand(dst, src);
}


void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x57);
  emit_sse_operand(dst, src);
}


void Assembler::xorps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x57);
  emit_sse_operand(dst, src);
}


void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x51);
  emit_sse_operand(dst, src);
}


void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2E);
  emit_sse_operand(dst, src);
}


void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2E);
  emit_sse_operand(dst, src);
}


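// In the immediate byte emitted below, the low two bits select the rounding
// mode, bit 2 (left clear here) selects the immediate rounding mode over
// MXCSR.RC, and bit 3 suppresses the precision (inexact) exception.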
void Assembler::roundsd(XMMRegister dst, XMMRegister src,
                        Assembler::RoundingMode mode) {
  ASSERT(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x3A);
  emit(0x0B);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  emit(static_cast<byte>(mode) | 0x8);
}


void Assembler::movmskpd(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x50);
  emit_sse_operand(dst, src);
}


void Assembler::movmskps(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x50);
  emit_sse_operand(dst, src);
}


void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
  Register ireg = { reg.code() };
  emit_operand(ireg, adr);
}


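// The register-register forms of emit_sse_operand emit a ModRM byte in
// register-direct mode: mod = 11, the reg field holds dst and the r/m field
// holds src (only the low three bits of each; higher bits come from REX).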
void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
}


void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
}


void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
}


void Assembler::db(uint8_t data) {
  EnsureSpace ensure_space(this);
  emit(data);
}


void Assembler::dd(uint32_t data) {
  EnsureSpace ensure_space(this);
  emitl(data);
}


// Relocation information implementations.

void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  ASSERT(!RelocInfo::IsNone(rmode));
  // Don't record external references unless the heap will be serialized.
  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
#ifdef DEBUG
    if (!Serializer::enabled()) {
      Serializer::TooLateToEnableNow();
    }
#endif
    if (!Serializer::enabled() && !emit_debug_code()) {
      return;
    }
  }
  RelocInfo rinfo(pc_, rmode, data, NULL);
  reloc_info_writer.Write(&rinfo);
}


void Assembler::RecordJSReturn() {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::JS_RETURN);
}


void Assembler::RecordDebugBreakSlot() {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
}


void Assembler::RecordComment(const char* msg, bool force) {
  if (FLAG_code_comments || force) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}


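// kApplyMask enumerates the relocation modes whose recorded targets have to
// be fixed up when a code object is moved; IsCodedSpecially() below reuses
// the same mask.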
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
    1 << RelocInfo::RUNTIME_ENTRY |
    1 << RelocInfo::INTERNAL_REFERENCE |
    1 << RelocInfo::CODE_AGE_SEQUENCE;


bool RelocInfo::IsCodedSpecially() {
  // The deserializer needs to know whether a pointer is specially coded.  Being
  // specially coded on x64 means that it is a relative 32-bit address, as used
  // by branch instructions.
  return (1 << rmode_) & kApplyMask;
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
