// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "macro-assembler.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Implementation of CpuFeatures

// The required user mode extensions in X64 are (from AMD64 ABI Table A.1):
//   fpu, tsc, cx8, cmov, mmx, sse, sse2, fxsr, syscall
uint64_t CpuFeatures::supported_ = kDefaultCpuFeatures;
uint64_t CpuFeatures::enabled_ = 0;
uint64_t CpuFeatures::found_by_runtime_probing_ = 0;

void CpuFeatures::Probe() {
  ASSERT(Heap::HasBeenSetup());
  ASSERT(supported_ == kDefaultCpuFeatures);
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  Assembler assm(NULL, 0);
  Label cpuid, done;
#define __ assm.
  // Save old rsp, since we are going to modify the stack.
  __ push(rbp);
  __ pushfq();
  __ push(rcx);
  __ push(rbx);
  __ movq(rbp, rsp);

  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
  __ pushfq();
  __ pop(rax);
  __ movq(rdx, rax);
  __ xor_(rax, Immediate(0x200000));  // Flip bit 21.
  __ push(rax);
  __ popfq();
  __ pushfq();
  __ pop(rax);
  __ xor_(rax, rdx);  // Different if CPUID is supported.
  __ j(not_zero, &cpuid);

  // CPUID not supported. Clear the supported features in edx:eax.
  __ xor_(rax, rax);
  __ jmp(&done);

  // Invoke CPUID with 1 in eax to get feature information in
  // ecx:edx. Temporarily enable CPUID support because we know it's
  // safe here.
  __ bind(&cpuid);
  __ movq(rax, Immediate(1));
  supported_ = kDefaultCpuFeatures | (1 << CPUID);
  { Scope fscope(CPUID);
    __ cpuid();
    // Move the result from ecx:edx to rdi.
    __ movl(rdi, rdx);  // Zero-extended to 64 bits.
    __ shl(rcx, Immediate(32));
    __ or_(rdi, rcx);

    // Get the SAHF supported flag from CPUID(0x80000001).
    __ movq(rax, 0x80000001, RelocInfo::NONE);
    __ cpuid();
  }
  supported_ = kDefaultCpuFeatures;

  // Put the CPU flags in rax.
  // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
  __ movl(rax, Immediate(1));
  __ and_(rcx, rax);  // Bit 0 is set if SAHF instruction supported.
  __ not_(rax);
  __ and_(rax, rdi);
  __ or_(rax, rcx);
  __ or_(rax, Immediate(1 << CPUID));

  // Done.
  __ bind(&done);
  __ movq(rsp, rbp);
  __ pop(rbx);
  __ pop(rcx);
  __ popfq();
  __ pop(rbp);
  __ ret(0);
#undef __

  CodeDesc desc;
  assm.GetCode(&desc);
  Object* code =
      Heap::CreateCode(desc, NULL, Code::ComputeFlags(Code::STUB), NULL);
  if (!code->IsCode()) return;
  LOG(CodeCreateEvent(Logger::BUILTIN_TAG,
                      Code::cast(code), "CpuFeatures::Probe"));
  typedef uint64_t (*F0)();
  F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry());
  supported_ = probe();
  found_by_runtime_probing_ = supported_;
  found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
  supported_ |= os_guarantees;
  found_by_runtime_probing_ &= ~os_guarantees;
  // SSE2 and CMOV must be available on an X64 CPU.
  ASSERT(IsSupported(CPUID));
  ASSERT(IsSupported(SSE2));
  ASSERT(IsSupported(CMOV));
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Loading a 64-bit immediate into a register and calling through that
  // register takes 13 bytes; each int3 takes one byte.
  static const int kCallCodeSize = 13;
  int code_size = kCallCodeSize + guard_bytes;

  // Create a code patcher.
  CodePatcher patcher(pc_, code_size);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_codesize;
  patcher.masm()->bind(&check_codesize);
#endif

  // Patch the code.
  patcher.masm()->movq(r10, target, RelocInfo::NONE);
  patcher.masm()->call(r10);

  // Check that the size of the code generated is as expected.
  ASSERT_EQ(kCallCodeSize,
            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));

  // Add the requested number of int3 instructions after the call.
  for (int i = 0; i < guard_bytes; i++) {
    patcher.masm()->int3();
  }
}


void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  // Patch the code at the current address with the supplied instructions.
  for (int i = 0; i < instruction_count; i++) {
    *(pc_ + i) = *(instructions + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count);
}

// -----------------------------------------------------------------------------
// Implementation of Operand

Operand::Operand(Register base, int32_t disp) : rex_(0) {
  len_ = 1;
  if (base.is(rsp) || base.is(r12)) {
    // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
    set_sib(times_1, rsp, base);
  }

  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    set_modrm(0, base);
  } else if (is_int8(disp)) {
    set_modrm(1, base);
    set_disp8(disp);
  } else {
    set_modrm(2, base);
    set_disp32(disp);
  }
}


Operand::Operand(Register base,
                 Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_sib(scale, index, base);
  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
    // possibly set by set_sib.
    set_modrm(0, rsp);
  } else if (is_int8(disp)) {
    set_modrm(1, rsp);
    set_disp8(disp);
  } else {
    set_modrm(2, rsp);
    set_disp32(disp);
  }
}


Operand::Operand(Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_modrm(0, rsp);
  set_sib(scale, index, rbp);
  set_disp32(disp);
}


// -----------------------------------------------------------------------------
// Implementation of Assembler.

#ifdef GENERATED_CODE_COVERAGE
static void InitCoverageLog();
#endif

byte* Assembler::spare_buffer_ = NULL;

Assembler::Assembler(void* buffer, int buffer_size)
    : code_targets_(100) {
  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;

      if (spare_buffer_ != NULL) {
        buffer = spare_buffer_;
        spare_buffer_ = NULL;
      }
    }
    if (buffer == NULL) {
      buffer_ = NewArray<byte>(buffer_size);
    } else {
      buffer_ = static_cast<byte*>(buffer);
    }
    buffer_size_ = buffer_size;
    own_buffer_ = true;
  } else {
    // Use externally provided buffer instead.
    ASSERT(buffer_size > 0);
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Clear the buffer in debug mode unless it was provided by the caller,
  // in which case we can't be sure it's okay to overwrite existing code
  // in it.
#ifdef DEBUG
  if (own_buffer_) {
    memset(buffer_, 0xCC, buffer_size);  // int3
  }
#endif

  // Setup buffer pointers.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);

  last_pc_ = NULL;
  current_statement_position_ = RelocInfo::kNoPosition;
  current_position_ = RelocInfo::kNoPosition;
  written_statement_position_ = current_statement_position_;
  written_position_ = current_position_;
#ifdef GENERATED_CODE_COVERAGE
  InitCoverageLog();
#endif
}


Assembler::~Assembler() {
  if (own_buffer_) {
    if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
      spare_buffer_ = buffer_;
    } else {
      DeleteArray(buffer_);
    }
  }
}


void Assembler::GetCode(CodeDesc* desc) {
  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Setup code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
  desc->reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
  desc->origin = this;

  Counters::reloc_info_size.Increment(desc->reloc_size);
}


void Assembler::Align(int m) {
  ASSERT(IsPowerOf2(m));
  while ((pc_offset() & (m - 1)) != 0) {
    nop();
  }
}


void Assembler::bind_to(Label* L, int pos) {
  ASSERT(!L->is_bound());  // Label may only be bound once.
  last_pc_ = NULL;
  ASSERT(0 <= pos && pos <= pc_offset());  // Position must be valid.
  if (L->is_linked()) {
    int current = L->pos();
    int next = long_at(current);
    while (next != current) {
      // Relative address, relative to point after address.
      int imm32 = pos - (current + sizeof(int32_t));
      long_at_put(current, imm32);
      current = next;
      next = long_at(next);
    }
    // Fix up last fixup on linked list.
    int last_imm32 = pos - (current + sizeof(int32_t));
    long_at_put(current, last_imm32);
  }
  L->bind_to(pos);
}


void Assembler::bind(Label* L) {
  bind_to(L, pc_offset());
}


void Assembler::GrowBuffer() {
  ASSERT(buffer_overflow());
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size.
  CodeDesc desc;  // the new buffer
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else {
    desc.buffer_size = 2*buffer_size_;
  }
  // Some internal data structures overflow for very large buffers;
  // kMaximalBufferSize must therefore not be too large.
  if ((desc.buffer_size > kMaximalBufferSize) ||
      (desc.buffer_size > Heap::MaxOldGenerationSize())) {
    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
  }

  // Setup new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);
  desc.instr_size = pc_offset();
  desc.reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));

  // Clear the buffer in debug mode. Use 'int3' instructions to make
  // sure we run into problems if we ever run uninitialized code.
#ifdef DEBUG
  memset(desc.buffer, 0xCC, desc.buffer_size);
#endif

  // Copy the data.
  intptr_t pc_delta = desc.buffer - buffer_;
  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
      (buffer_ + buffer_size_);
  memmove(desc.buffer, buffer_, desc.instr_size);
  memmove(rc_delta + reloc_info_writer.pos(),
          reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
    spare_buffer_ = buffer_;
  } else {
    DeleteArray(buffer_);
  }
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  if (last_pc_ != NULL) {
    last_pc_ += pc_delta;
  }
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
      if (*p != 0) {  // 0 means uninitialized.
        *p += pc_delta;
      }
    }
  }

  ASSERT(!buffer_overflow());
}


void Assembler::emit_operand(int code, const Operand& adr) {
  ASSERT(is_uint3(code));
  const unsigned length = adr.len_;
  ASSERT(length > 0);

  // Emit updated ModR/M byte containing the given register.
  ASSERT((adr.buf_[0] & 0x38) == 0);
  pc_[0] = adr.buf_[0] | code << 3;

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
  pc_ += length;
}


// Assembler Instruction implementations.

void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(reg, op);
  emit(opcode);
  emit_operand(reg, op);
}


void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(reg, rm_reg);
  emit(opcode);
  emit_modrm(reg, rm_reg);
}


void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_modrm(reg, rm_reg);
}


void Assembler::arithmetic_op_16(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_modrm(reg, rm_reg);
}


void Assembler::arithmetic_op_32(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


void Assembler::immediate_arithmetic_op(byte subcode,
                                        Register dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}

void Assembler::immediate_arithmetic_op(byte subcode,
                                        const Operand& dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitw(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitw(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitw(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          const Operand& dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_operand(subcode, dst);
  emit(src.value_);
}


void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          Register dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (dst.code() > 3) {
    // Use 64-bit mode byte registers.
    emit_rex_64(dst);
  }
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_modrm(subcode, dst);
  emit(src.value_);
}


void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint6(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_rex_64(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_rex_64(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}


void Assembler::shift(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


void Assembler::shift_32(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint5(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_optional_rex_32(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_optional_rex_32(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}


void Assembler::bt(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA3);
  emit_operand(src, dst);
}


void Assembler::bts(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAB);
  emit_operand(src, dst);
}


void Assembler::call(Label* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (L->is_linked()) {
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_code_target(target, rmode);
}


void Assembler::call(Register adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: FF /2 r64.
  if (adr.high_bit()) {
    emit_rex_64(adr);
  }
  emit(0xFF);
  emit_modrm(0x2, adr);
}


void Assembler::call(const Operand& op) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: FF /2 m64.
  emit_rex_64(op);
  emit(0xFF);
  emit_operand(2, op);
}


void Assembler::clc() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF8);
}

void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x99);
}


void Assembler::cmovq(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  // No need to check CpuInfo for CMOV support; it's a required part of the
  // 64-bit architecture.
  ASSERT(cc >= 0);  // Use mov for unconditional moves.
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


void Assembler::cmovl(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


void Assembler::cmpb_al(Immediate imm8) {
  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x3c);
  emit(imm8.value_);
}


void Assembler::cpuid() {
  ASSERT(CpuFeatures::IsEnabled(CPUID));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x0F);
  emit(0xA2);
}


void Assembler::cqo() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64();
  emit(0x99);
}


void Assembler::decq(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


void Assembler::decq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


void Assembler::decl(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


void Assembler::decl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


void Assembler::decb(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (dst.code() > 3) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst);
  }
  emit(0xFE);
  emit_modrm(0x1, dst);
}


void Assembler::decb(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xFE);
  emit_operand(1, dst);
}


void Assembler::enter(Immediate size) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xC8);
  emitw(size.value_);  // 16 bit operand, always.
  emit(0);
}


void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF4);
}


void Assembler::idivq(Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


void Assembler::idivl(Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


void Assembler::imul(Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x5, src);
}


void Assembler::imul(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


void Assembler::imul(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}


void Assembler::imul(Register dst, Register src, Immediate imm) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}


void Assembler::imull(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


void Assembler::incq(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x0, dst);
}


void Assembler::incq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


void Assembler::incl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


void Assembler::int3() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xCC);
}


void Assembler::j(Condition cc, Label* L) {
  if (cc == always) {
    jmp(L);
    return;
  } else if (cc == never) {
    return;
  }
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint4(cc));
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 6;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size)) {
      // 0111 tttn #8-bit disp.
      emit(0x70 | cc);
      emit((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp.
      emit(0x0F);
      emit(0x80 | cc);
      emitl(offs - long_size);
    }
  } else if (L->is_linked()) {
    // 0000 1111 1000 tttn #32-bit disp.
    emit(0x0F);
    emit(0x80 | cc);
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    emit(0x0F);
    emit(0x80 | cc);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::j(Condition cc,
                  Handle<Code> target,
                  RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint4(cc));
  // 0000 1111 1000 tttn #32-bit disp.
  emit(0x0F);
  emit(0x80 | cc);
  emit_code_target(target, rmode);
}


void Assembler::jmp(Label* L) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (L->is_bound()) {
    int offs = L->pos() - pc_offset() - 1;
    ASSERT(offs <= 0);
    if (is_int8(offs - sizeof(int8_t))) {
      // 1110 1011 #8-bit disp.
      emit(0xEB);
      emit((offs - sizeof(int8_t)) & 0xFF);
    } else {
      // 1110 1001 #32-bit disp.
      emit(0xE9);
      emitl(offs - sizeof(int32_t));
    }
  } else if (L->is_linked()) {
    // 1110 1001 #32-bit disp.
    emit(0xE9);
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    // 1110 1001 #32-bit disp.
    ASSERT(L->is_unused());
    emit(0xE9);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // 1110 1001 #32-bit disp.
  emit(0xE9);
  emit_code_target(target, rmode);
}


void Assembler::jmp(Register target) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode FF/4 r64.
  if (target.high_bit()) {
    emit_rex_64(target);
  }
  emit(0xFF);
  emit_modrm(0x4, target);
}


void Assembler::jmp(const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  // Opcode FF/4 m64.
  emit_optional_rex_32(src);
  emit(0xFF);
  emit_operand(0x4, src);
}


void Assembler::lea(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x8D);
  emit_operand(dst, src);
}


void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x48);  // REX.W
  emit(0xA1);
  emitq(reinterpret_cast<uintptr_t>(value), mode);
}


void Assembler::load_rax(ExternalReference ref) {
  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}


void Assembler::leave() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xC9);
}


void Assembler::movb(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_32(dst, src);
  emit(0x8A);
  emit_operand(dst, src);
}

void Assembler::movb(Register dst, Immediate imm) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_32(dst);
  emit(0xC6);
  emit_modrm(0x0, dst);
  emit(imm.value_);
}

void Assembler::movb(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_32(src, dst);
  emit(0x88);
  emit_operand(src, dst);
}

void Assembler::movw(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}

void Assembler::movl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}


void Assembler::movl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x8B);
  emit_modrm(dst, src);
}


void Assembler::movl(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


void Assembler::movl(const Operand& dst, Immediate value) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0x0, dst);
  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
}


void Assembler::movl(Register dst, Immediate value) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_modrm(0x0, dst);
  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
}


void Assembler::movq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}


void Assembler::movq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x8B);
  emit_modrm(dst, src);
}


void Assembler::movq(Register dst, Immediate value) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xC7);
  emit_modrm(0x0, dst);
  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
}


void Assembler::movq(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
  // This method must not be used with heap object references. The stored
  // address is not GC safe. Use the handle version instead.
  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(reinterpret_cast<uintptr_t>(value), rmode);
}


void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
  // Non-relocatable values might not need a 64-bit representation.
  if (rmode == RelocInfo::NONE) {
    // Sadly, there is no zero or sign extending move for 8-bit immediates.
    if (is_int32(value)) {
      movq(dst, Immediate(static_cast<int32_t>(value)));
      return;
    } else if (is_uint32(value)) {
      movl(dst, Immediate(static_cast<int32_t>(value)));
      return;
    }
    // Value cannot be represented by 32 bits, so do a full 64 bit immediate
    // value.
  }
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(value, rmode);
}


void Assembler::movq(Register dst, ExternalReference ref) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(reinterpret_cast<uintptr_t>(ref.address()),
        RelocInfo::EXTERNAL_REFERENCE);
}


void Assembler::movq(const Operand& dst, Immediate value) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xC7);
  emit_operand(0, dst);
  emit(value);
}


// Loads the ip-relative location of the src label into the target location
// (as a 32-bit offset sign extended to 64-bit).
void Assembler::movl(const Operand& dst, Label* src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0, dst);
  if (src->is_bound()) {
    int offset = src->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (src->is_linked()) {
    emitl(src->pos());
    src->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(src->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    src->link_to(current);
  }
}


void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
  // If there is no relocation info, emit the value of the handle efficiently
  // (possibly using less than 8 bytes for the value).
  if (mode == RelocInfo::NONE) {
    // There is no possible reason to store a heap pointer without relocation
    // info, so it must be a smi.
    ASSERT(value->IsSmi());
    movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
  } else {
    EnsureSpace ensure_space(this);
    last_pc_ = pc_;
    ASSERT(value->IsHeapObject());
    ASSERT(!Heap::InNewSpace(*value));
    emit_rex_64(dst);
    emit(0xB8 | dst.low_bits());
    emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
  }
}


void Assembler::movsxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_32(dst, src);
  emit(0x0F);
  emit(0xBE);
  emit_operand(dst, src);
}


void Assembler::movsxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBF);
  emit_operand(dst, src);
}


void Assembler::movsxlq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x63);
  emit_modrm(dst, src);
}


void Assembler::movsxlq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x63);
  emit_operand(dst, src);
}


void Assembler::movzxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxbl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}


void Assembler::movzxwl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}


void Assembler::repmovsb() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF3);
  emit(0xA4);
}


void Assembler::repmovsw() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);  // Operand size override.
  emit(0xF3);
  emit(0xA4);
}


void Assembler::repmovsl() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF3);
  emit(0xA5);
}


void Assembler::repmovsq() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF3);
  emit_rex_64();
  emit(0xA5);
}


void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x4, src);
}


void Assembler::neg(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xF7);
  emit_modrm(0x3, dst);
}


void Assembler::negl(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst);
  emit(0xF7);
  emit_modrm(0x3, dst);
}


void Assembler::neg(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xF7);
  emit_operand(3, dst);
}


void Assembler::nop() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x90);
}


void Assembler::not_(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xF7);
  emit_modrm(0x2, dst);
}


void Assembler::not_(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);
  emit(0xF7);
  emit_operand(2, dst);
}


void Assembler::nop(int n) {
  // The recommended multi-byte sequences of NOP instructions from the Intel 64
  // and IA-32 Architectures Software Developer's Manual.
  //
  // Length  Assembly                                Byte Sequence
  // 2 bytes 66 NOP                                  66 90H
  // 3 bytes NOP DWORD ptr [EAX]                     0F 1F 00H
  // 4 bytes NOP DWORD ptr [EAX + 00H]               0F 1F 40 00H
  // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H]       0F 1F 44 00 00H
  // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H]    66 0F 1F 44 00 00H
  // 7 bytes NOP DWORD ptr [EAX + 00000000H]         0F 1F 80 00 00 00 00H
  // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
  // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 +         66 0F 1F 84 00 00 00 00
  //         00000000H]                              00H

  ASSERT(1 <= n);
  ASSERT(n <= 9);
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  switch (n) {
    case 1:
      emit(0x90);
      return;
    case 2:
      emit(0x66);
      emit(0x90);
      return;
    case 3:
      emit(0x0f);
      emit(0x1f);
      emit(0x00);
      return;
    case 4:
      emit(0x0f);
      emit(0x1f);
      emit(0x40);
      emit(0x00);
      return;
    case 5:
      emit(0x0f);
      emit(0x1f);
      emit(0x44);
      emit(0x00);
      emit(0x00);
      return;
    case 6:
      emit(0x66);
      emit(0x0f);
      emit(0x1f);
      emit(0x44);
      emit(0x00);
      emit(0x00);
      return;
    case 7:
      emit(0x0f);
      emit(0x1f);
      emit(0x80);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
    case 8:
      emit(0x0f);
      emit(0x1f);
      emit(0x84);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
    case 9:
      emit(0x66);
      emit(0x0f);
      emit(0x1f);
      emit(0x84);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
  }
}


void Assembler::pop(Register dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (dst.high_bit()) {
    emit_rex_64(dst);
  }
  emit(0x58 | dst.low_bits());
}


void Assembler::pop(const Operand& dst) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst);  // Could be omitted in some cases.
  emit(0x8F);
  emit_operand(0, dst);
}


void Assembler::popfq() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x9D);
}


void Assembler::push(Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (src.high_bit()) {
    emit_rex_64(src);
  }
  emit(0x50 | src.low_bits());
}


void Assembler::push(const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src);  // Could be omitted in some cases.
  emit(0xFF);
  emit_operand(6, src);
}


void Assembler::push(Immediate value) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (is_int8(value.value_)) {
    emit(0x6A);
    emit(value.value_);  // Emit low byte of value.
  } else {
    emit(0x68);
    emitl(value.value_);
  }
}


void Assembler::pushfq() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x9C);
}


void Assembler::rdtsc() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x0F);
  emit(0x31);
}


void Assembler::ret(int imm16) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint16(imm16));
  if (imm16 == 0) {
    emit(0xC3);
  } else {
    emit(0xC2);
    emit(imm16 & 0xFF);
    emit((imm16 >> 8) & 0xFF);
  }
}


void Assembler::setcc(Condition cc, Register reg) {
  if (cc > last_condition) {
    movb(reg, Immediate(cc == always ? 1 : 0));
    return;
  }
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  ASSERT(is_uint4(cc));
  if (reg.code() > 3) {  // Use x64 byte registers, where different.
    emit_rex_32(reg);
  }
  emit(0x0F);
  emit(0x90 | cc);
  emit_modrm(0x0, reg);
}


void Assembler::shld(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA5);
  emit_modrm(src, dst);
}


void Assembler::shrd(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAD);
  emit_modrm(src, dst);
}


void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
    Register other = src.is(rax) ? dst : src;
    emit_rex_64(other);
    emit(0x90 | other.low_bits());
  } else {
    emit_rex_64(src, dst);
    emit(0x87);
    emit_modrm(src, dst);
  }
}


void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x48);  // REX.W
  emit(0xA3);
  emitq(reinterpret_cast<uintptr_t>(dst), mode);
}


void Assembler::store_rax(ExternalReference ref) {
  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}


void Assembler::testb(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (dst.code() > 3 || src.code() > 3) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst, src);
  }
  emit(0x84);
  emit_modrm(dst, src);
}


void Assembler::testb(Register reg, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (reg.is(rax)) {
    emit(0xA8);
    emit(mask.value_);  // Low byte emitted.
  } else {
    if (reg.code() > 3) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(reg);
    }
    emit(0xF6);
    emit_modrm(0x0, reg);
    emit(mask.value_);  // Low byte emitted.
  }
}


void Assembler::testb(const Operand& op, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(rax, op);
  emit(0xF6);
  emit_operand(rax, op);  // Operation code 0
  emit(mask.value_);  // Low byte emitted.
}


void Assembler::testb(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (reg.code() > 3) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(reg, op);
  } else {
    emit_optional_rex_32(reg, op);
  }
  emit(0x84);
  emit_operand(reg, op);
}


void Assembler::testl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(dst, src);
  emit(0x85);
  emit_modrm(dst, src);
}


void Assembler::testl(Register reg, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(reg, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (reg.is(rax)) {
    emit(0xA9);
    emit(mask);
  } else {
    emit_optional_rex_32(rax, reg);
    emit(0xF7);
    emit_modrm(0x0, reg);
    emit(mask);
  }
}


void Assembler::testl(const Operand& op, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(op, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(rax, op);
  emit(0xF7);
  emit_operand(rax, op);  // Operation code 0
  emit(mask);
}


void Assembler::testq(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(reg, op);
  emit(0x85);
  emit_operand(reg, op);
}


void Assembler::testq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_rex_64(dst, src);
  emit(0x85);
  emit_modrm(dst, src);
}


void Assembler::testq(Register dst, Immediate mask) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  if (dst.is(rax)) {
    emit_rex_64();
    emit(0xA9);
    emit(mask);
  } else {
    emit_rex_64(dst);
    emit(0xF7);
    emit_modrm(0, dst);
    emit(mask);
  }
}


// FPU instructions.
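// Note: the x87 helpers below encode a stack register ST(i) by adding the
// 3-bit index i to the second opcode byte; see emit_farith at the end of
// this group.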

void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xD9, 0xC0, i);
}


void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xE8);
}


void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xEE);
}


void Assembler::fld_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(0, adr);
}


void Assembler::fld_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(0, adr);
}


void Assembler::fstp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(3, adr);
}


void Assembler::fstp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(3, adr);
}


void Assembler::fstp(int index) {
  ASSERT(is_uint3(index));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDD, 0xD8, index);
}


void Assembler::fild_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(0, adr);
}


void Assembler::fild_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(5, adr);
}


void Assembler::fistp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(3, adr);
}


void Assembler::fisttp_s(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(1, adr);
}


void Assembler::fisttp_d(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(1, adr);
}


void Assembler::fist_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(2, adr);
}


void Assembler::fistp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(7, adr);
}


void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xE1);
}


void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xE0);
}


void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xFF);
}


void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xFE);
}


void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDC, 0xC0, i);
}


void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDC, 0xE8, i);
}


void Assembler::fisub_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_optional_rex_32(adr);
  emit(0xDA);
  emit_operand(4, adr);
}


void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDC, 0xC8, i);
}


void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDC, 0xF8, i);
}


void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDE, 0xC0, i);
}


void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDE, 0xE8, i);
}


void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDE, 0xE0, i);
}


void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDE, 0xC8, i);
}


void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDE, 0xF8, i);
}


void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xF8);
}


void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xF5);
}


void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xD9, 0xC8, i);
}


void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xF7);
}


void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDD, 0xC0, i);
}


void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xE4);
}


void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit_farith(0xDD, 0xE8, i);
}


void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDA);
  emit(0xE9);
}


void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDB);
  emit(0xE8 + i);
}


void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDF);
  emit(0xE9);
}


void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDE);
  emit(0xD9);
}


void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDF);
  emit(0xE0);
}


void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x9B);
}


void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xD9);
  emit(0xFC);
}


void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xDB);
  emit(0xE2);
}


void Assembler::sahf() {
  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs have SAHF
  // in 64-bit mode. Test CPUID.
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x9E);
}


void Assembler::emit_farith(int b1, int b2, int i) {
  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  ASSERT(is_uint3(i));  // illegal stack offset
  emit(b1);
  emit(b2 + i);
}

// SSE 2 operations.

void Assembler::movsd(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);  // double
  emit_optional_rex_32(src, dst);
  emit(0x0F);
  emit(0x11);  // store
  emit_sse_operand(src, dst);
}


void Assembler::movsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movsd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::cvttss2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttsd2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::addsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x58);
  emit_sse_operand(dst, src);
}


void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x59);
  emit_sse_operand(dst, src);
}


void Assembler::subsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5C);
  emit_sse_operand(dst, src);
}


void Assembler::divsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5E);
  emit_sse_operand(dst, src);
}

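// Note: the packed-double operations below (xorpd, comisd, ucomisd) use the
// 0x66 operand-size prefix, unlike the scalar-double helpers above, which
// use the 0xF2 prefix.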
void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x57);
  emit_sse_operand(dst, src);
}


void Assembler::comisd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x2f);
  emit_sse_operand(dst, src);
}


void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  last_pc_ = pc_;
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x2e);
  emit_sse_operand(dst, src);
}


void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
  Register ireg = { reg.code() };
  emit_operand(ireg, adr);
}


void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
}

void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
  emit(0xC0 | (dst.low_bits() << 3) | src.low_bits());
}


// Relocation information implementations.

void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  ASSERT(rmode != RelocInfo::NONE);
  // Don't record external references unless the heap will be serialized.
  if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
      !Serializer::enabled() &&
      !FLAG_debug_code) {
    return;
  }
  RelocInfo rinfo(pc_, rmode, data);
  reloc_info_writer.Write(&rinfo);
}

void Assembler::RecordJSReturn() {
  WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::JS_RETURN);
}


void Assembler::RecordComment(const char* msg) {
  if (FLAG_debug_code) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}


void Assembler::RecordPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  current_position_ = pos;
}


void Assembler::RecordStatementPosition(int pos) {
  ASSERT(pos != RelocInfo::kNoPosition);
  ASSERT(pos >= 0);
  current_statement_position_ = pos;
}


void Assembler::WriteRecordedPositions() {
  // Write the statement position if it is different from what was written
  // last time.
  if (current_statement_position_ != written_statement_position_) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::STATEMENT_POSITION, current_statement_position_);
    written_statement_position_ = current_statement_position_;
  }

  // Write the position if it is different from what was written last time and
  // also different from the written statement position.
  if (current_position_ != written_position_ &&
      current_position_ != written_statement_position_) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::POSITION, current_position_);
    written_position_ = current_position_;
  }
}


const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
                                  1 << RelocInfo::INTERNAL_REFERENCE |
                                  1 << RelocInfo::JS_RETURN;

} }  // namespace v8::internal