// assembler-ia32.cc, revision 3ef787dbeca8a5fb1086949cda830dccee07bfbd
// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the
// distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the license above has been modified
// significantly by Google Inc.
// Copyright 2012 the V8 project authors. All rights reserved.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "disassembler.h"
#include "macro-assembler.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Implementation of CpuFeatures

#ifdef DEBUG
bool CpuFeatures::initialized_ = false;
#endif
uint64_t CpuFeatures::supported_ = 0;
uint64_t CpuFeatures::found_by_runtime_probing_ = 0;


// The Probe method needs executable memory, so it reserves and commits a
// small executable buffer through VirtualMemory. Allocation failure is
// silent and leads to a safe default.
void CpuFeatures::Probe() {
  ASSERT(!initialized_);
  ASSERT(supported_ == 0);
#ifdef DEBUG
  initialized_ = true;
#endif
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  const int kBufferSize = 4 * KB;
  VirtualMemory* memory = new VirtualMemory(kBufferSize);
  if (!memory->IsReserved()) {
    delete memory;
    return;
  }
  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
    delete memory;
    return;
  }

  Assembler assm(NULL, memory->address(), kBufferSize);
  Label cpuid, done;
#define __ assm.
  // Save old esp, since we are going to modify the stack.
  __ push(ebp);
  __ pushfd();
  __ push(ecx);
  __ push(ebx);
  __ mov(ebp, esp);

  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
  __ pushfd();
  __ pop(eax);
  __ mov(edx, eax);
  __ xor_(eax, 0x200000);  // Flip bit 21.
  __ push(eax);
  __ popfd();
  __ pushfd();
  __ pop(eax);
  __ xor_(eax, edx);  // Different if CPUID is supported.
  __ j(not_zero, &cpuid);

  // CPUID not supported. Clear the supported features in edx:eax.
  __ xor_(eax, eax);
  __ xor_(edx, edx);
  __ jmp(&done);

  // Invoke CPUID with 1 in eax to get feature information in
  // ecx:edx. Temporarily enable CPUID support because we know it's
  // safe here.
  __ bind(&cpuid);
  __ mov(eax, 1);
  supported_ = (1 << CPUID);
  { Scope fscope(CPUID);
    __ cpuid();
  }
  supported_ = 0;

  // Move the result from ecx:edx to edx:eax and make sure to mark the
  // CPUID feature as supported.
  __ mov(eax, edx);
  __ or_(eax, 1 << CPUID);
  __ mov(edx, ecx);

  // Done.
  __ bind(&done);
  __ mov(esp, ebp);
  __ pop(ebx);
  __ pop(ecx);
  __ popfd();
  __ pop(ebp);
  __ ret(0);
#undef __

  typedef uint64_t (*F0)();
  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
  supported_ = probe();
  found_by_runtime_probing_ = supported_;
  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
  supported_ |= os_guarantees;
  found_by_runtime_probing_ &= ~os_guarantees;

  delete memory;
}
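

// Illustrative note (not part of the original file): after Probe() has run,
// code generators typically guard optional instructions on the probed bits,
// mirroring the CPUID scope used above. A minimal sketch:
//
//   if (CpuFeatures::IsSupported(SSE2)) {
//     CpuFeatures::Scope use_sse2(SSE2);
//     __ movdbl(xmm0, Operand(esp, 0));  // SSE2-only instruction.
//   }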


// -----------------------------------------------------------------------------
// Implementation of Displacement

void Displacement::init(Label* L, Type type) {
  ASSERT(!L->is_bound());
  int next = 0;
  if (L->is_linked()) {
    next = L->pos();
    ASSERT(next > 0);  // Displacements must be at positions > 0
  }
  // Ensure that we _never_ overflow the next field.
  ASSERT(NextField::is_valid(Assembler::kMaximalBufferSize));
  data_ = NextField::encode(next) | TypeField::encode(type);
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo


const int RelocInfo::kApplyMask =
    RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
    1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
    1 << RelocInfo::DEBUG_BREAK_SLOT;


bool RelocInfo::IsCodedSpecially() {
  // The deserializer needs to know whether a pointer is specially coded.  Being
  // specially coded on IA32 means that it is a relative address, as used by
  // branch instructions.  These are also the ones that need changing when a
  // code object moves.
  return (1 << rmode_) & kApplyMask;
}


void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  // Patch the code at the current address with the supplied instructions.
  for (int i = 0; i < instruction_count; i++) {
    *(pc_ + i) = *(instructions + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count);
}


// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Call instruction takes up 5 bytes and int3 takes up one byte.
  static const int kCallCodeSize = 5;
  int code_size = kCallCodeSize + guard_bytes;

  // Create a code patcher.
  CodePatcher patcher(pc_, code_size);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_codesize;
  patcher.masm()->bind(&check_codesize);
#endif

  // Patch the code.
  patcher.masm()->call(target, RelocInfo::NONE);

  // Check that the size of the code generated is as expected.
  ASSERT_EQ(kCallCodeSize,
            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));

  // Add the requested number of int3 instructions after the call.
  ASSERT_GE(guard_bytes, 0);
  for (int i = 0; i < guard_bytes; i++) {
    patcher.masm()->int3();
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
  // [base + disp/r]
  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
    // [base]
    set_modrm(0, base);
    if (base.is(esp)) set_sib(times_1, esp, base);
  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
    // [base + disp8]
    set_modrm(1, base);
    if (base.is(esp)) set_sib(times_1, esp, base);
    set_disp8(disp);
  } else {
    // [base + disp/r]
    set_modrm(2, base);
    if (base.is(esp)) set_sib(times_1, esp, base);
    set_dispr(disp, rmode);
  }
}


Operand::Operand(Register base,
                 Register index,
                 ScaleFactor scale,
                 int32_t disp,
                 RelocInfo::Mode rmode) {
  ASSERT(!index.is(esp));  // illegal addressing mode
  // [base + index*scale + disp/r]
  if (disp == 0 && rmode == RelocInfo::NONE && !base.is(ebp)) {
    // [base + index*scale]
    set_modrm(0, esp);
    set_sib(scale, index, base);
  } else if (is_int8(disp) && rmode == RelocInfo::NONE) {
    // [base + index*scale + disp8]
    set_modrm(1, esp);
    set_sib(scale, index, base);
    set_disp8(disp);
  } else {
    // [base + index*scale + disp/r]
    set_modrm(2, esp);
    set_sib(scale, index, base);
    set_dispr(disp, rmode);
  }
}


Operand::Operand(Register index,
                 ScaleFactor scale,
                 int32_t disp,
                 RelocInfo::Mode rmode) {
  ASSERT(!index.is(esp));  // illegal addressing mode
  // [index*scale + disp/r]
  set_modrm(0, esp);
  set_sib(scale, index, ebp);
  set_dispr(disp, rmode);
}


bool Operand::is_reg(Register reg) const {
  return ((buf_[0] & 0xF8) == 0xC0)  // addressing mode is register only.
      && ((buf_[0] & 0x07) == reg.code());  // register codes match.
}


bool Operand::is_reg_only() const {
  return (buf_[0] & 0xF8) == 0xC0;  // Addressing mode is register only.
}


Register Operand::reg() const {
  ASSERT(is_reg_only());
  return Register::from_code(buf_[0] & 0x07);
}
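

// Illustrative note (not part of the original file): the constructors above
// cover the usual ia32 addressing modes (assuming the RelocInfo::NONE default
// arguments from the header), e.g.
//
//   Operand(eax, 0)                     // [eax]
//   Operand(ebp, -8)                    // [ebp - 8], disp8 form
//   Operand(ebx, ecx, times_4, 0x1000)  // [ebx + ecx*4 + 0x1000]
//   Operand(ecx, times_2, 4)            // [ecx*2 + 4], no base register
//
// The ModR/M, SIB, and displacement bytes are staged in buf_ and copied into
// the instruction stream later by Assembler::emit_operand().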


// -----------------------------------------------------------------------------
// Implementation of Assembler.

// Emit a single byte. Must always be inlined.
#define EMIT(x) \
  *pc_++ = (x)


#ifdef GENERATED_CODE_COVERAGE
static void InitCoverageLog();
#endif

Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
    : AssemblerBase(arg_isolate),
      positions_recorder_(this),
      emit_debug_code_(FLAG_debug_code) {
  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;

      if (isolate()->assembler_spare_buffer() != NULL) {
        buffer = isolate()->assembler_spare_buffer();
        isolate()->set_assembler_spare_buffer(NULL);
      }
    }
    if (buffer == NULL) {
      buffer_ = NewArray<byte>(buffer_size);
    } else {
      buffer_ = static_cast<byte*>(buffer);
    }
    buffer_size_ = buffer_size;
    own_buffer_ = true;
  } else {
    // Use externally provided buffer instead.
    ASSERT(buffer_size > 0);
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Clear the buffer in debug mode unless it was provided by the
  // caller in which case we can't be sure it's okay to overwrite
  // existing code in it; see CodePatcher::CodePatcher(...).
#ifdef DEBUG
  if (own_buffer_) {
    memset(buffer_, 0xCC, buffer_size);  // int3
  }
#endif

  // Set up buffer pointers.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);

#ifdef GENERATED_CODE_COVERAGE
  InitCoverageLog();
#endif
}


Assembler::~Assembler() {
  if (own_buffer_) {
    if (isolate()->assembler_spare_buffer() == NULL &&
        buffer_size_ == kMinimalBufferSize) {
      isolate()->set_assembler_spare_buffer(buffer_);
    } else {
      DeleteArray(buffer_);
    }
  }
}


void Assembler::GetCode(CodeDesc* desc) {
  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Set up code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
  desc->origin = this;
}
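

// Illustrative note (not part of the original file): instructions and
// relocation information share the single buffer and grow towards each other:
//
//   buffer_                                     buffer_ + buffer_size_
//   |--- instructions --->|   ...gap...   |<--- relocation info ---|
//   ^ pc_                                 ^ reloc_info_writer.pos()
//
// GetCode() above relies on that gap; GrowBuffer() further down is called
// before the two regions can collide.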


void Assembler::Align(int m) {
  ASSERT(IsPowerOf2(m));
  int mask = m - 1;
  int addr = pc_offset();
  Nop((m - (addr & mask)) & mask);
}


bool Assembler::IsNop(Address addr) {
  Address a = addr;
  while (*a == 0x66) a++;
  if (*a == 0x90) return true;
  if (a[0] == 0xf && a[1] == 0x1f) return true;
  return false;
}


void Assembler::Nop(int bytes) {
  EnsureSpace ensure_space(this);

  if (!CpuFeatures::IsSupported(SSE2)) {
    // Older CPUs that do not support SSE2 may not support multibyte NOP
    // instructions.
    for (; bytes > 0; bytes--) {
      EMIT(0x90);
    }
    return;
  }

  // Multi byte nops from http://support.amd.com/us/Processor_TechDocs/40546.pdf
  while (bytes > 0) {
    switch (bytes) {
      case 2:
        EMIT(0x66);
      case 1:
        EMIT(0x90);
        return;
      case 3:
        EMIT(0xf);
        EMIT(0x1f);
        EMIT(0);
        return;
      case 4:
        EMIT(0xf);
        EMIT(0x1f);
        EMIT(0x40);
        EMIT(0);
        return;
      case 6:
        EMIT(0x66);
      case 5:
        EMIT(0xf);
        EMIT(0x1f);
        EMIT(0x44);
        EMIT(0);
        EMIT(0);
        return;
      case 7:
        EMIT(0xf);
        EMIT(0x1f);
        EMIT(0x80);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        return;
      default:
      case 11:
        EMIT(0x66);
        bytes--;
      case 10:
        EMIT(0x66);
        bytes--;
      case 9:
        EMIT(0x66);
        bytes--;
      case 8:
        EMIT(0xf);
        EMIT(0x1f);
        EMIT(0x84);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        bytes -= 8;
    }
  }
}


void Assembler::CodeTargetAlign() {
  Align(16);  // Preferred alignment of jump targets on ia32.
}


void Assembler::cpuid() {
  ASSERT(CpuFeatures::IsEnabled(CPUID));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA2);
}


void Assembler::pushad() {
  EnsureSpace ensure_space(this);
  EMIT(0x60);
}


void Assembler::popad() {
  EnsureSpace ensure_space(this);
  EMIT(0x61);
}


void Assembler::pushfd() {
  EnsureSpace ensure_space(this);
  EMIT(0x9C);
}


void Assembler::popfd() {
  EnsureSpace ensure_space(this);
  EMIT(0x9D);
}


void Assembler::push(const Immediate& x) {
  EnsureSpace ensure_space(this);
  if (x.is_int8()) {
    EMIT(0x6a);
    EMIT(x.x_);
  } else {
    EMIT(0x68);
    emit(x);
  }
}


void Assembler::push_imm32(int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0x68);
  emit(imm32);
}


void Assembler::push(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x50 | src.code());
}


void Assembler::push(const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(esi, src);
}


void Assembler::pop(Register dst) {
  ASSERT(reloc_info_writer.last_pc() != NULL);
  EnsureSpace ensure_space(this);
  EMIT(0x58 | dst.code());
}


void Assembler::pop(const Operand& dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x8F);
  emit_operand(eax, dst);
}


void Assembler::enter(const Immediate& size) {
  EnsureSpace ensure_space(this);
  EMIT(0xC8);
  emit_w(size);
  EMIT(0);
}


void Assembler::leave() {
  EnsureSpace ensure_space(this);
  EMIT(0xC9);
}


void Assembler::mov_b(Register dst, const Operand& src) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x8A);
  emit_operand(dst, src);
}


void Assembler::mov_b(const Operand& dst, int8_t imm8) {
  EnsureSpace ensure_space(this);
  EMIT(0xC6);
  emit_operand(eax, dst);
  EMIT(imm8);
}


void Assembler::mov_b(const Operand& dst, Register src) {
  CHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x88);
  emit_operand(src, dst);
}


void Assembler::mov_w(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x8B);
  emit_operand(dst, src);
}


void Assembler::mov_w(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x89);
  emit_operand(src, dst);
}


void Assembler::mov(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(imm32);
}


void Assembler::mov(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(x);
}


void Assembler::mov(Register dst, Handle<Object> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(handle);
}


void Assembler::mov(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8B);
  emit_operand(dst, src);
}


void Assembler::mov(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);
  EMIT(0xC0 | src.code() << 3 | dst.code());
}


void Assembler::mov(const Operand& dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);
  emit_operand(eax, dst);
  emit(x);
}


void Assembler::mov(const Operand& dst, Handle<Object> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);
  emit_operand(eax, dst);
  emit(handle);
}


void Assembler::mov(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);
  emit_operand(src, dst);
}


void Assembler::movsx_b(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBE);
  emit_operand(dst, src);
}


void Assembler::movsx_w(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBF);
  emit_operand(dst, src);
}


void Assembler::movzx_b(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzx_w(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB7);
  emit_operand(dst, src);
}


void Assembler::cmov(Condition cc, Register dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(CMOV));
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  EMIT(0x0F);
  EMIT(0x40 + cc);
  emit_operand(dst, src);
}


void Assembler::cld() {
  EnsureSpace ensure_space(this);
  EMIT(0xFC);
}


void Assembler::rep_movs() {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xA5);
}


void Assembler::rep_stos() {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xAB);
}


void Assembler::stos() {
  EnsureSpace ensure_space(this);
  EMIT(0xAB);
}


void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.is(eax) || dst.is(eax)) {  // Single-byte encoding.
    EMIT(0x90 | (src.is(eax) ? dst.code() : src.code()));
  } else {
    EMIT(0x87);
    EMIT(0xC0 | src.code() << 3 | dst.code());
  }
}


void Assembler::adc(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(2, Operand(dst), Immediate(imm32));
}


void Assembler::adc(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x13);
  emit_operand(dst, src);
}


void Assembler::add(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x03);
  emit_operand(dst, src);
}


void Assembler::add(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x01);
  emit_operand(src, dst);
}


void Assembler::add(const Operand& dst, const Immediate& x) {
  ASSERT(reloc_info_writer.last_pc() != NULL);
  EnsureSpace ensure_space(this);
  emit_arith(0, dst, x);
}


void Assembler::and_(Register dst, int32_t imm32) {
  and_(dst, Immediate(imm32));
}


void Assembler::and_(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, Operand(dst), x);
}


void Assembler::and_(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x23);
  emit_operand(dst, src);
}


void Assembler::and_(const Operand& dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, dst, x);
}


void Assembler::and_(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x21);
  emit_operand(src, dst);
}


void Assembler::cmpb(const Operand& op, int8_t imm8) {
  EnsureSpace ensure_space(this);
  if (op.is_reg(eax)) {
    EMIT(0x3C);
  } else {
    EMIT(0x80);
    emit_operand(edi, op);  // edi == 7
  }
  EMIT(imm8);
}


void Assembler::cmpb(const Operand& op, Register reg) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x38);
  emit_operand(reg, op);
}


void Assembler::cmpb(Register reg, const Operand& op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x3A);
  emit_operand(reg, op);
}


void Assembler::cmpw(const Operand& op, Immediate imm16) {
  ASSERT(imm16.is_int16());
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x81);
  emit_operand(edi, op);
  emit_w(imm16);
}


void Assembler::cmp(Register reg, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(imm32));
}


void Assembler::cmp(Register reg, Handle<Object> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(handle));
}


void Assembler::cmp(Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  EMIT(0x3B);
  emit_operand(reg, op);
}


void Assembler::cmp(const Operand& op, const Immediate& imm) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, imm);
}


void Assembler::cmp(const Operand& op, Handle<Object> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, Immediate(handle));
}


void Assembler::cmpb_al(const Operand& op) {
  EnsureSpace ensure_space(this);
  EMIT(0x38);  // CMP r/m8, r8
  emit_operand(eax, op);  // eax has same code as register al.
}


void Assembler::cmpw_ax(const Operand& op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x39);  // CMP r/m16, r16
  emit_operand(eax, op);  // eax has same code as register ax.
}


void Assembler::dec_b(Register dst) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0xFE);
  EMIT(0xC8 | dst.code());
}


void Assembler::dec_b(const Operand& dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFE);
  emit_operand(ecx, dst);
}


void Assembler::dec(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x48 | dst.code());
}


void Assembler::dec(const Operand& dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(ecx, dst);
}


void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  EMIT(0x99);
}


void Assembler::idiv(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xF8 | src.code());
}


void Assembler::imul(Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE8 | reg.code());
}


void Assembler::imul(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAF);
  emit_operand(dst, src);
}


void Assembler::imul(Register dst, Register src, int32_t imm32) {
  EnsureSpace ensure_space(this);
  if (is_int8(imm32)) {
    EMIT(0x6B);
    EMIT(0xC0 | dst.code() << 3 | src.code());
    EMIT(imm32);
  } else {
    EMIT(0x69);
    EMIT(0xC0 | dst.code() << 3 | src.code());
    emit(imm32);
  }
}


void Assembler::inc(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x40 | dst.code());
}


void Assembler::inc(const Operand& dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(eax, dst);
}


void Assembler::lea(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8D);
  emit_operand(dst, src);
}


void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE0 | src.code());
}


void Assembler::neg(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD8 | dst.code());
}


void Assembler::not_(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD0 | dst.code());
}


void Assembler::or_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(1, Operand(dst), Immediate(imm32));
}


void Assembler::or_(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0B);
  emit_operand(dst, src);
}


void Assembler::or_(const Operand& dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(1, dst, x);
}


void Assembler::or_(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x09);
  emit_operand(src, dst);
}


void Assembler::rcl(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD0 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD0 | dst.code());
    EMIT(imm8);
  }
}


void Assembler::rcr(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD8 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD8 | dst.code());
    EMIT(imm8);
  }
}


void Assembler::sar(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xF8 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xF8 | dst.code());
    EMIT(imm8);
  }
}


void Assembler::sar_cl(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  EMIT(0xF8 | dst.code());
}


void Assembler::sbb(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x1B);
  emit_operand(dst, src);
}


void Assembler::shld(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA5);
  emit_operand(dst, src);
}


void Assembler::shl(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xE0 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xE0 | dst.code());
    EMIT(imm8);
  }
}


void Assembler::shl_cl(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  EMIT(0xE0 | dst.code());
}


void Assembler::shrd(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAD);
  emit_operand(dst, src);
}


void Assembler::shr(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xE8 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xE8 | dst.code());
    EMIT(imm8);
  }
}


void Assembler::shr_cl(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  EMIT(0xE8 | dst.code());
}


void Assembler::sub(const Operand& dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(5, dst, x);
}


void Assembler::sub(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x2B);
  emit_operand(dst, src);
}


void Assembler::sub(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x29);
  emit_operand(src, dst);
}


void Assembler::test(Register reg, const Immediate& imm) {
  EnsureSpace ensure_space(this);
  // Only use test against byte for registers that have a byte
  // variant: eax, ebx, ecx, and edx.
  if (imm.rmode_ == RelocInfo::NONE &&
      is_uint8(imm.x_) &&
      reg.is_byte_register()) {
    uint8_t imm8 = imm.x_;
    if (reg.is(eax)) {
      EMIT(0xA8);
      EMIT(imm8);
    } else {
      emit_arith_b(0xF6, 0xC0, reg, imm8);
    }
  } else {
    // This is not using emit_arith because test doesn't support
    // sign-extension of 8-bit operands.
    if (reg.is(eax)) {
      EMIT(0xA9);
    } else {
      EMIT(0xF7);
      EMIT(0xC0 | reg.code());
    }
    emit(imm);
  }
}


void Assembler::test(Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  EMIT(0x85);
  emit_operand(reg, op);
}


void Assembler::test_b(Register reg, const Operand& op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x84);
  emit_operand(reg, op);
}


void Assembler::test(const Operand& op, const Immediate& imm) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(eax, op);
  emit(imm);
}


void Assembler::test_b(const Operand& op, uint8_t imm8) {
  if (op.is_reg_only() && !op.reg().is_byte_register()) {
    test(op, Immediate(imm8));
    return;
  }
  EnsureSpace ensure_space(this);
  EMIT(0xF6);
  emit_operand(eax, op);
  EMIT(imm8);
}


void Assembler::xor_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(6, Operand(dst), Immediate(imm32));
}


void Assembler::xor_(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x33);
  emit_operand(dst, src);
}


void Assembler::xor_(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x31);
  emit_operand(src, dst);
}


void Assembler::xor_(const Operand& dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(6, dst, x);
}


void Assembler::bt(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA3);
  emit_operand(src, dst);
}


void Assembler::bts(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAB);
  emit_operand(src, dst);
}


void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  EMIT(0xF4);
}


void Assembler::int3() {
  EnsureSpace ensure_space(this);
  EMIT(0xCC);
}


void Assembler::nop() {
  EnsureSpace ensure_space(this);
  EMIT(0x90);
}


void Assembler::rdtsc() {
  ASSERT(CpuFeatures::IsEnabled(RDTSC));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x31);
}


void Assembler::ret(int imm16) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint16(imm16));
  if (imm16 == 0) {
    EMIT(0xC3);
  } else {
    EMIT(0xC2);
    EMIT(imm16 & 0xFF);
    EMIT((imm16 >> 8) & 0xFF);
  }
}


// Labels refer to positions in the (to be) generated code.
// There are bound, linked, and unused labels.
//
// Bound labels refer to known positions in the already
// generated code. pos() is the position the label refers to.
//
// Linked labels refer to unknown positions in the code
// to be generated; pos() is the position of the 32bit
// Displacement of the last instruction using the label.
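

// Illustrative sketch (not part of the original file): a typical use of
// labels with this assembler, assuming an Assembler instance 'masm':
//
//   Label loop, done;
//   masm.bind(&loop);      // 'loop' becomes a bound label.
//   masm.dec(eax);
//   masm.j(zero, &done);   // 'done' stays linked until bound below.
//   masm.jmp(&loop);
//   masm.bind(&done);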


void Assembler::print(Label* L) {
  if (L->is_unused()) {
    PrintF("unused label\n");
  } else if (L->is_bound()) {
    PrintF("bound label to %d\n", L->pos());
  } else if (L->is_linked()) {
    Label l = *L;
    PrintF("unbound label");
    while (l.is_linked()) {
      Displacement disp = disp_at(&l);
      PrintF("@ %d ", l.pos());
      disp.print();
      PrintF("\n");
      disp.next(&l);
    }
  } else {
    PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
  }
}


void Assembler::bind_to(Label* L, int pos) {
  EnsureSpace ensure_space(this);
  ASSERT(0 <= pos && pos <= pc_offset());  // must have a valid binding position
  while (L->is_linked()) {
    Displacement disp = disp_at(L);
    int fixup_pos = L->pos();
    if (disp.type() == Displacement::CODE_RELATIVE) {
      // Relative to Code* heap object pointer.
      long_at_put(fixup_pos, pos + Code::kHeaderSize - kHeapObjectTag);
    } else {
      if (disp.type() == Displacement::UNCONDITIONAL_JUMP) {
        ASSERT(byte_at(fixup_pos - 1) == 0xE9);  // jmp expected
      }
      // Relative address, relative to point after address.
      int imm32 = pos - (fixup_pos + sizeof(int32_t));
      long_at_put(fixup_pos, imm32);
    }
    disp.next(L);
  }
  while (L->is_near_linked()) {
    int fixup_pos = L->near_link_pos();
    int offset_to_next =
        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
    ASSERT(offset_to_next <= 0);
    // Relative address, relative to point after address.
    int disp = pos - fixup_pos - sizeof(int8_t);
    ASSERT(0 <= disp && disp <= 127);
    set_byte_at(fixup_pos, disp);
    if (offset_to_next < 0) {
      L->link_to(fixup_pos + offset_to_next, Label::kNear);
    } else {
      L->UnuseNear();
    }
  }
  L->bind_to(pos);
}


void Assembler::bind(Label* L) {
  EnsureSpace ensure_space(this);
  ASSERT(!L->is_bound());  // label can only be bound once
  bind_to(L, pc_offset());
}
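

// Illustrative note (not part of the original file): while a label is linked,
// the 32-bit displacement fields of the instructions that reference it form a
// chain through the code buffer; L->pos() is the most recent fixup position
// and each Displacement encodes the position of the next one. bind_to() above
// walks this chain and patches every fixup with the final target position.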


void Assembler::call(Label* L) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  if (L->is_bound()) {
    const int long_size = 5;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit(offs - long_size);
  } else {
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit_disp(L, Displacement::OTHER);
  }
}


void Assembler::call(byte* entry, RelocInfo::Mode rmode) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE8);
  emit(entry - (pc_ + sizeof(int32_t)), rmode);
}


int Assembler::CallSize(const Operand& adr) {
  // Call size is 1 (opcode) + adr.len_ (operand).
  return 1 + adr.len_;
}


void Assembler::call(const Operand& adr) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(edx, adr);
}


int Assembler::CallSize(Handle<Code> code, RelocInfo::Mode rmode) {
  return 1 /* EMIT */ + sizeof(uint32_t) /* emit */;
}


void Assembler::call(Handle<Code> code,
                     RelocInfo::Mode rmode,
                     unsigned ast_id) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE8);
  emit(reinterpret_cast<intptr_t>(code.location()), rmode, ast_id);
}


void Assembler::jmp(Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 5;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size)) {
      // 1110 1011 #8-bit disp.
      EMIT(0xEB);
      EMIT((offs - short_size) & 0xFF);
    } else {
      // 1110 1001 #32-bit disp.
      EMIT(0xE9);
      emit(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    EMIT(0xEB);
    emit_near_disp(L);
  } else {
    // 1110 1001 #32-bit disp.
    EMIT(0xE9);
    emit_disp(L, Displacement::UNCONDITIONAL_JUMP);
  }
}


void Assembler::jmp(byte* entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  emit(entry - (pc_ + sizeof(int32_t)), rmode);
}


void Assembler::jmp(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(esp, adr);
}


void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  emit(reinterpret_cast<intptr_t>(code.location()), rmode);
}


void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  ASSERT(0 <= cc && cc < 16);
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 6;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size)) {
      // 0111 tttn #8-bit disp
      EMIT(0x70 | cc);
      EMIT((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp
      EMIT(0x0F);
      EMIT(0x80 | cc);
      emit(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    EMIT(0x70 | cc);
    emit_near_disp(L);
  } else {
    // 0000 1111 1000 tttn #32-bit disp
    // Note: we could eliminate conditional jumps to this jump if the
    // condition is the same; however, that seems to be a rather unlikely case.
    EMIT(0x0F);
    EMIT(0x80 | cc);
    emit_disp(L, Displacement::OTHER);
  }
}


void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT((0 <= cc) && (cc < 16));
  // 0000 1111 1000 tttn #32-bit disp.
  EMIT(0x0F);
  EMIT(0x80 | cc);
  emit(entry - (pc_ + sizeof(int32_t)), rmode);
}


void Assembler::j(Condition cc, Handle<Code> code) {
  EnsureSpace ensure_space(this);
  // 0000 1111 1000 tttn #32-bit disp
  EMIT(0x0F);
  EMIT(0x80 | cc);
  emit(reinterpret_cast<intptr_t>(code.location()), RelocInfo::CODE_TARGET);
}


// FPU instructions.

void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}


void Assembler::fstp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, i);
}


void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE8);
}


void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEB);
}


void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEE);
}


void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xED);
}


void Assembler::fld_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(eax, adr);
}


void Assembler::fld_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(eax, adr);
}


void Assembler::fstp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(ebx, adr);
}


void Assembler::fstp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ebx, adr);
}


void Assembler::fst_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(edx, adr);
}


void Assembler::fild_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(eax, adr);
}


void Assembler::fild_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(ebp, adr);
}


void Assembler::fistp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ebx, adr);
}


void Assembler::fisttp_s(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ecx, adr);
}


void Assembler::fisttp_d(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ecx, adr);
}


void Assembler::fist_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(edx, adr);
}


void Assembler::fistp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(edi, adr);
}


void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE1);
}


void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE0);
}


void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFF);
}


void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFE);
}


void Assembler::fptan() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF2);
}
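

// Illustrative sketch (not part of the original file): the x87 helpers in
// this section operate on the FPU register stack. Adding two doubles held in
// memory operands 'a', 'b' and storing to 'result' (hypothetical names) might
// look like:
//
//   __ fld_d(a);        // st0 = a
//   __ fld_d(b);        // st0 = b, st1 = a
//   __ faddp(1);        // st0 = a + b, stack popped
//   __ fstp_d(result);  // store the sum and pop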


void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF1);
}


void Assembler::f2xm1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF0);
}


void Assembler::fscale() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFD);
}


void Assembler::fninit() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE3);
}


void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}


void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}


void Assembler::fisub_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  emit_operand(esp, adr);
}


void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}


void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}


void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}


void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}


void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}


void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}


void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}


void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF8);
}


void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF5);
}


void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}


void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF7);
}


void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}


void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE4);
}


void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}


void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  EMIT(0xE9);
}


void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE8 + i);
}


void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE9);
}


void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDE);
  EMIT(0xD9);
}


void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE0);
}


void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  EMIT(0x9B);
}


void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFC);
}


void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE2);
}


void Assembler::sahf() {
  EnsureSpace ensure_space(this);
  EMIT(0x9E);
}


void Assembler::setcc(Condition cc, Register reg) {
  ASSERT(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x90 | cc);
  EMIT(0xC0 | reg.code());
}


void Assembler::cvttss2si(Register dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttsd2si(Register dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}


void Assembler::addsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x58);
  emit_sse_operand(dst, src);
}


void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x59);
  emit_sse_operand(dst, src);
}


void Assembler::subsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x5C);
  emit_sse_operand(dst, src);
}


void Assembler::divsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x5E);
  emit_sse_operand(dst, src);
}


void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x57);
  emit_sse_operand(dst, src);
}


void Assembler::xorps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x57);
  emit_sse_operand(dst, src);
}


void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x51);
  emit_sse_operand(dst, src);
}


void Assembler::andpd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x54);
  emit_sse_operand(dst, src);
}


void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}


void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}


void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0B);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  EMIT(static_cast<byte>(mode) | 0x8);
}

void Assembler::movmskpd(Register dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}


void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(1);  // LT == 1
}


void Assembler::movaps(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x28);
  emit_sse_operand(dst, src);
}


void Assembler::movdqa(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}


void Assembler::movdqa(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}


void Assembler::movdqu(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}


void Assembler::movdqu(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}


void Assembler::movntdqa(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x38);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::movntdq(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xE7);
  emit_sse_operand(src, dst);
}


void Assembler::prefetch(const Operand& src, int level) {
  ASSERT(is_uint2(level));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x18);
  XMMRegister code = { level };  // Emit hint number in Reg position of RegR/M.
  emit_sse_operand(code, src);
}
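

// Illustrative note (not part of the original file): movdbl() below is this
// assembler's name for an SSE2 double load/store and simply forwards to
// movsd(), so for example
//
//   __ movdbl(xmm1, Operand(esp, 4));
//
// emits the same F2 0F 10 encoding as the corresponding movsd() call.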


void Assembler::movdbl(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  movsd(dst, src);
}


void Assembler::movdbl(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  movsd(dst, src);
}


void Assembler::movsd(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}


void Assembler::movsd(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movsd(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x10);
  emit_sse_operand(dst, src);
}


void Assembler::movss(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}


void Assembler::movss(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movss(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x10);
  emit_sse_operand(dst, src);
}


void Assembler::movd(XMMRegister dst, const Operand& src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6E);
  emit_sse_operand(dst, src);
}


void Assembler::movd(const Operand& dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7E);
  emit_sse_operand(src, dst);
}


void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
  ASSERT(CpuFeatures::IsSupported(SSE4_1));
  ASSERT(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x17);
  emit_sse_operand(dst, src);
  EMIT(imm8);
}


void Assembler::pand(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xDB);
  emit_sse_operand(dst, src);
}


void Assembler::pxor(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xEF);
  emit_sse_operand(dst, src);
}


void Assembler::por(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xEB);
  emit_sse_operand(dst, src);
}


void Assembler::ptest(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x38);
  EMIT(0x17);
  emit_sse_operand(dst, src);
}


void Assembler::psllq(XMMRegister reg, int8_t shift) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}


void Assembler::psllq(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xF3);
  emit_sse_operand(dst, src);
}


void Assembler::psrlq(XMMRegister reg, int8_t shift) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}


void Assembler::psrlq(XMMRegister dst, XMMRegister src) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xD3);
  emit_sse_operand(dst, src);
}


void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) {
  ASSERT(CpuFeatures::IsEnabled(SSE2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}


void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x16);
  emit_sse_operand(src, dst);
  EMIT(offset);
}


void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) {
  ASSERT(CpuFeatures::IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x22);
  emit_sse_operand(dst, src);
  EMIT(offset);
}


void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
  Register ireg = { reg.code() };
  emit_operand(ireg, adr);
}


void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
  EMIT(0xC0 | dst.code() << 3 | src.code());
}


void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
  EMIT(0xC0 | dst.code() << 3 | src.code());
}


void Assembler::Print() {
  Disassembler::Decode(stdout, buffer_, pc_);
}


void Assembler::RecordJSReturn() {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::JS_RETURN);
}


void Assembler::RecordDebugBreakSlot() {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
}


void Assembler::RecordComment(const char* msg, bool force) {
  if (FLAG_code_comments || force) {
    EnsureSpace ensure_space(this);
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}
  CodeDesc desc;  // the new buffer
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else {
    desc.buffer_size = 2*buffer_size_;
  }
  // Some internal data structures overflow for very large buffers;
  // kMaximalBufferSize must be kept small enough for them to stay in range.
  if ((desc.buffer_size > kMaximalBufferSize) ||
      (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
  }

  // Set up new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);
  desc.instr_size = pc_offset();
  desc.reloc_size = (buffer_ + buffer_size_) - (reloc_info_writer.pos());

  // Clear the buffer in debug mode. Fill it with 'int3' instructions so
  // that executing uninitialized memory traps immediately.
#ifdef DEBUG
  memset(desc.buffer, 0xCC, desc.buffer_size);
#endif

  // Copy the data.
  int pc_delta = desc.buffer - buffer_;
  int rc_delta = (desc.buffer + desc.buffer_size) - (buffer_ + buffer_size_);
  memmove(desc.buffer, buffer_, desc.instr_size);
  memmove(rc_delta + reloc_info_writer.pos(),
          reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  if (isolate()->assembler_spare_buffer() == NULL &&
      buffer_size_ == kMinimalBufferSize) {
    isolate()->set_assembler_spare_buffer(buffer_);
  } else {
    DeleteArray(buffer_);
  }
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::RUNTIME_ENTRY) {
      int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
      *p -= pc_delta;  // relocate entry
    } else if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      int32_t* p = reinterpret_cast<int32_t*>(it.rinfo()->pc());
      if (*p != 0) {  // 0 means uninitialized.
        *p += pc_delta;
      }
    }
  }

  ASSERT(!overflow());
}


void Assembler::emit_arith_b(int op1, int op2, Register dst, int imm8) {
  ASSERT(is_uint8(op1) && is_uint8(op2));  // wrong opcode
  ASSERT(is_uint8(imm8));
  ASSERT((op1 & 0x01) == 0);  // should be 8bit operation
  EMIT(op1);
  EMIT(op2 | dst.code());
  EMIT(imm8);
}


void Assembler::emit_arith(int sel, Operand dst, const Immediate& x) {
  ASSERT((0 <= sel) && (sel <= 7));
  Register ireg = { sel };
  if (x.is_int8()) {
    EMIT(0x83);  // using a sign-extended 8-bit immediate.
    emit_operand(ireg, dst);
    EMIT(x.x_ & 0xFF);
  } else if (dst.is_reg(eax)) {
    EMIT((sel << 3) | 0x05);  // short form if the destination is eax.
    emit(x);
  } else {
    EMIT(0x81);  // using a literal 32-bit immediate.
    emit_operand(ireg, dst);
    emit(x);
  }
}


void Assembler::emit_operand(Register reg, const Operand& adr) {
  const unsigned length = adr.len_;
  ASSERT(length > 0);

  // Emit updated ModRM byte containing the given register.
  pc_[0] = (adr.buf_[0] & ~0x38) | (reg.code() << 3);

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
  pc_ += length;

  // Emit relocation information if necessary.
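  // An operand that needs relocation ends in a 32-bit displacement, and
  // RecordRelocInfo records at the current pc_, so pc_ is rewound to the
  // start of that disp32 before the entry is written and restored afterwards.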
  if (length >= sizeof(int32_t) && adr.rmode_ != RelocInfo::NONE) {
    pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
    RecordRelocInfo(adr.rmode_);
    pc_ += sizeof(int32_t);
  }
}


void Assembler::emit_farith(int b1, int b2, int i) {
  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  ASSERT(0 <= i && i < 8);  // illegal stack offset
  EMIT(b1);
  EMIT(b2 + i);
}


void Assembler::db(uint8_t data) {
  EnsureSpace ensure_space(this);
  EMIT(data);
}


void Assembler::dd(uint32_t data) {
  EnsureSpace ensure_space(this);
  emit(data);
}


void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  ASSERT(rmode != RelocInfo::NONE);
  // Don't record external references unless the heap will be serialized.
  if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
#ifdef DEBUG
    if (!Serializer::enabled()) {
      Serializer::TooLateToEnableNow();
    }
#endif
    if (!Serializer::enabled() && !emit_debug_code()) {
      return;
    }
  }
  RelocInfo rinfo(pc_, rmode, data, NULL);
  reloc_info_writer.Write(&rinfo);
}


#ifdef GENERATED_CODE_COVERAGE
static FILE* coverage_log = NULL;


static void InitCoverageLog() {
  char* file_name = getenv("V8_GENERATED_CODE_COVERAGE_LOG");
  if (file_name != NULL) {
    coverage_log = fopen(file_name, "a+");  // append, create if missing
  }
}


void LogGeneratedCodeCoverage(const char* file_line) {
  const char* return_address = (&file_line)[-1];
  char* push_insn = const_cast<char*>(return_address - 12);
  push_insn[0] = 0xeb;  // Relative branch insn.
  push_insn[1] = 13;    // Skip over coverage insns.
  if (coverage_log != NULL) {
    fprintf(coverage_log, "%s\n", file_line);
    fflush(coverage_log);
  }
}

#endif

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32