// assembler_x86.cc revision 647b9ed41cdb7cf302fd356627a3ba372419b78c
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "assembler_x86.h" 18 19#include "base/casts.h" 20#include "entrypoints/quick/quick_entrypoints.h" 21#include "memory_region.h" 22#include "thread.h" 23#include "utils/dwarf_cfi.h" 24 25namespace art { 26namespace x86 { 27 28std::ostream& operator<<(std::ostream& os, const XmmRegister& reg) { 29 return os << "XMM" << static_cast<int>(reg); 30} 31 32std::ostream& operator<<(std::ostream& os, const X87Register& reg) { 33 return os << "ST" << static_cast<int>(reg); 34} 35 36void X86Assembler::call(Register reg) { 37 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 38 EmitUint8(0xFF); 39 EmitRegisterOperand(2, reg); 40} 41 42 43void X86Assembler::call(const Address& address) { 44 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 45 EmitUint8(0xFF); 46 EmitOperand(2, address); 47} 48 49 50void X86Assembler::call(Label* label) { 51 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 52 EmitUint8(0xE8); 53 static const int kSize = 5; 54 EmitLabel(label, kSize); 55} 56 57 58void X86Assembler::call(const ExternalLabel& label) { 59 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 60 intptr_t call_start = buffer_.GetPosition(); 61 EmitUint8(0xE8); 62 EmitInt32(label.address()); 63 static const intptr_t kCallExternalLabelSize = 5; 64 DCHECK_EQ((buffer_.GetPosition() - call_start), kCallExternalLabelSize); 65} 66 67 68void 
X86Assembler::pushl(Register reg) { 69 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 70 EmitUint8(0x50 + reg); 71} 72 73 74void X86Assembler::pushl(const Address& address) { 75 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 76 EmitUint8(0xFF); 77 EmitOperand(6, address); 78} 79 80 81void X86Assembler::pushl(const Immediate& imm) { 82 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 83 if (imm.is_int8()) { 84 EmitUint8(0x6A); 85 EmitUint8(imm.value() & 0xFF); 86 } else { 87 EmitUint8(0x68); 88 EmitImmediate(imm); 89 } 90} 91 92 93void X86Assembler::popl(Register reg) { 94 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 95 EmitUint8(0x58 + reg); 96} 97 98 99void X86Assembler::popl(const Address& address) { 100 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 101 EmitUint8(0x8F); 102 EmitOperand(0, address); 103} 104 105 106void X86Assembler::movl(Register dst, const Immediate& imm) { 107 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 108 EmitUint8(0xB8 + dst); 109 EmitImmediate(imm); 110} 111 112 113void X86Assembler::movl(Register dst, Register src) { 114 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 115 EmitUint8(0x89); 116 EmitRegisterOperand(src, dst); 117} 118 119 120void X86Assembler::movl(Register dst, const Address& src) { 121 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 122 EmitUint8(0x8B); 123 EmitOperand(dst, src); 124} 125 126 127void X86Assembler::movl(const Address& dst, Register src) { 128 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 129 EmitUint8(0x89); 130 EmitOperand(src, dst); 131} 132 133 134void X86Assembler::movl(const Address& dst, const Immediate& imm) { 135 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 136 EmitUint8(0xC7); 137 EmitOperand(0, dst); 138 EmitImmediate(imm); 139} 140 141void X86Assembler::movl(const Address& dst, Label* lbl) { 142 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 143 EmitUint8(0xC7); 144 EmitOperand(0, dst); 145 EmitLabel(lbl, dst.length_ + 5); 146} 147 148void 
X86Assembler::movzxb(Register dst, ByteRegister src) { 149 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 150 EmitUint8(0x0F); 151 EmitUint8(0xB6); 152 EmitRegisterOperand(dst, src); 153} 154 155 156void X86Assembler::movzxb(Register dst, const Address& src) { 157 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 158 EmitUint8(0x0F); 159 EmitUint8(0xB6); 160 EmitOperand(dst, src); 161} 162 163 164void X86Assembler::movsxb(Register dst, ByteRegister src) { 165 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 166 EmitUint8(0x0F); 167 EmitUint8(0xBE); 168 EmitRegisterOperand(dst, src); 169} 170 171 172void X86Assembler::movsxb(Register dst, const Address& src) { 173 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 174 EmitUint8(0x0F); 175 EmitUint8(0xBE); 176 EmitOperand(dst, src); 177} 178 179 180void X86Assembler::movb(Register /*dst*/, const Address& /*src*/) { 181 LOG(FATAL) << "Use movzxb or movsxb instead."; 182} 183 184 185void X86Assembler::movb(const Address& dst, ByteRegister src) { 186 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 187 EmitUint8(0x88); 188 EmitOperand(src, dst); 189} 190 191 192void X86Assembler::movb(const Address& dst, const Immediate& imm) { 193 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 194 EmitUint8(0xC6); 195 EmitOperand(EAX, dst); 196 CHECK(imm.is_int8()); 197 EmitUint8(imm.value() & 0xFF); 198} 199 200 201void X86Assembler::movzxw(Register dst, Register src) { 202 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 203 EmitUint8(0x0F); 204 EmitUint8(0xB7); 205 EmitRegisterOperand(dst, src); 206} 207 208 209void X86Assembler::movzxw(Register dst, const Address& src) { 210 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 211 EmitUint8(0x0F); 212 EmitUint8(0xB7); 213 EmitOperand(dst, src); 214} 215 216 217void X86Assembler::movsxw(Register dst, Register src) { 218 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 219 EmitUint8(0x0F); 220 EmitUint8(0xBF); 221 EmitRegisterOperand(dst, src); 222} 223 224 225void 
X86Assembler::movsxw(Register dst, const Address& src) { 226 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 227 EmitUint8(0x0F); 228 EmitUint8(0xBF); 229 EmitOperand(dst, src); 230} 231 232 233void X86Assembler::movw(Register /*dst*/, const Address& /*src*/) { 234 LOG(FATAL) << "Use movzxw or movsxw instead."; 235} 236 237 238void X86Assembler::movw(const Address& dst, Register src) { 239 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 240 EmitOperandSizeOverride(); 241 EmitUint8(0x89); 242 EmitOperand(src, dst); 243} 244 245 246void X86Assembler::movw(const Address& dst, const Immediate& imm) { 247 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 248 EmitOperandSizeOverride(); 249 EmitUint8(0xC7); 250 EmitOperand(0, dst); 251 CHECK(imm.is_uint16() || imm.is_int16()); 252 EmitUint8(imm.value() & 0xFF); 253 EmitUint8(imm.value() >> 8); 254} 255 256 257void X86Assembler::leal(Register dst, const Address& src) { 258 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 259 EmitUint8(0x8D); 260 EmitOperand(dst, src); 261} 262 263 264void X86Assembler::cmovl(Condition condition, Register dst, Register src) { 265 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 266 EmitUint8(0x0F); 267 EmitUint8(0x40 + condition); 268 EmitRegisterOperand(dst, src); 269} 270 271 272void X86Assembler::setb(Condition condition, Register dst) { 273 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 274 EmitUint8(0x0F); 275 EmitUint8(0x90 + condition); 276 EmitOperand(0, Operand(dst)); 277} 278 279 280void X86Assembler::movaps(XmmRegister dst, XmmRegister src) { 281 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 282 EmitUint8(0x0F); 283 EmitUint8(0x28); 284 EmitXmmRegisterOperand(dst, src); 285} 286 287 288void X86Assembler::movss(XmmRegister dst, const Address& src) { 289 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 290 EmitUint8(0xF3); 291 EmitUint8(0x0F); 292 EmitUint8(0x10); 293 EmitOperand(dst, src); 294} 295 296 297void X86Assembler::movss(const Address& dst, 
XmmRegister src) { 298 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 299 EmitUint8(0xF3); 300 EmitUint8(0x0F); 301 EmitUint8(0x11); 302 EmitOperand(src, dst); 303} 304 305 306void X86Assembler::movss(XmmRegister dst, XmmRegister src) { 307 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 308 EmitUint8(0xF3); 309 EmitUint8(0x0F); 310 EmitUint8(0x11); 311 EmitXmmRegisterOperand(src, dst); 312} 313 314 315void X86Assembler::movd(XmmRegister dst, Register src) { 316 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 317 EmitUint8(0x66); 318 EmitUint8(0x0F); 319 EmitUint8(0x6E); 320 EmitOperand(dst, Operand(src)); 321} 322 323 324void X86Assembler::movd(Register dst, XmmRegister src) { 325 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 326 EmitUint8(0x66); 327 EmitUint8(0x0F); 328 EmitUint8(0x7E); 329 EmitOperand(src, Operand(dst)); 330} 331 332 333void X86Assembler::addss(XmmRegister dst, XmmRegister src) { 334 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 335 EmitUint8(0xF3); 336 EmitUint8(0x0F); 337 EmitUint8(0x58); 338 EmitXmmRegisterOperand(dst, src); 339} 340 341 342void X86Assembler::addss(XmmRegister dst, const Address& src) { 343 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 344 EmitUint8(0xF3); 345 EmitUint8(0x0F); 346 EmitUint8(0x58); 347 EmitOperand(dst, src); 348} 349 350 351void X86Assembler::subss(XmmRegister dst, XmmRegister src) { 352 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 353 EmitUint8(0xF3); 354 EmitUint8(0x0F); 355 EmitUint8(0x5C); 356 EmitXmmRegisterOperand(dst, src); 357} 358 359 360void X86Assembler::subss(XmmRegister dst, const Address& src) { 361 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 362 EmitUint8(0xF3); 363 EmitUint8(0x0F); 364 EmitUint8(0x5C); 365 EmitOperand(dst, src); 366} 367 368 369void X86Assembler::mulss(XmmRegister dst, XmmRegister src) { 370 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 371 EmitUint8(0xF3); 372 EmitUint8(0x0F); 373 EmitUint8(0x59); 374 EmitXmmRegisterOperand(dst, src); 
375} 376 377 378void X86Assembler::mulss(XmmRegister dst, const Address& src) { 379 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 380 EmitUint8(0xF3); 381 EmitUint8(0x0F); 382 EmitUint8(0x59); 383 EmitOperand(dst, src); 384} 385 386 387void X86Assembler::divss(XmmRegister dst, XmmRegister src) { 388 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 389 EmitUint8(0xF3); 390 EmitUint8(0x0F); 391 EmitUint8(0x5E); 392 EmitXmmRegisterOperand(dst, src); 393} 394 395 396void X86Assembler::divss(XmmRegister dst, const Address& src) { 397 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 398 EmitUint8(0xF3); 399 EmitUint8(0x0F); 400 EmitUint8(0x5E); 401 EmitOperand(dst, src); 402} 403 404 405void X86Assembler::flds(const Address& src) { 406 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 407 EmitUint8(0xD9); 408 EmitOperand(0, src); 409} 410 411 412void X86Assembler::fstps(const Address& dst) { 413 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 414 EmitUint8(0xD9); 415 EmitOperand(3, dst); 416} 417 418 419void X86Assembler::movsd(XmmRegister dst, const Address& src) { 420 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 421 EmitUint8(0xF2); 422 EmitUint8(0x0F); 423 EmitUint8(0x10); 424 EmitOperand(dst, src); 425} 426 427 428void X86Assembler::movsd(const Address& dst, XmmRegister src) { 429 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 430 EmitUint8(0xF2); 431 EmitUint8(0x0F); 432 EmitUint8(0x11); 433 EmitOperand(src, dst); 434} 435 436 437void X86Assembler::movsd(XmmRegister dst, XmmRegister src) { 438 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 439 EmitUint8(0xF2); 440 EmitUint8(0x0F); 441 EmitUint8(0x11); 442 EmitXmmRegisterOperand(src, dst); 443} 444 445 446void X86Assembler::addsd(XmmRegister dst, XmmRegister src) { 447 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 448 EmitUint8(0xF2); 449 EmitUint8(0x0F); 450 EmitUint8(0x58); 451 EmitXmmRegisterOperand(dst, src); 452} 453 454 455void X86Assembler::addsd(XmmRegister dst, const Address& 
src) { 456 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 457 EmitUint8(0xF2); 458 EmitUint8(0x0F); 459 EmitUint8(0x58); 460 EmitOperand(dst, src); 461} 462 463 464void X86Assembler::subsd(XmmRegister dst, XmmRegister src) { 465 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 466 EmitUint8(0xF2); 467 EmitUint8(0x0F); 468 EmitUint8(0x5C); 469 EmitXmmRegisterOperand(dst, src); 470} 471 472 473void X86Assembler::subsd(XmmRegister dst, const Address& src) { 474 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 475 EmitUint8(0xF2); 476 EmitUint8(0x0F); 477 EmitUint8(0x5C); 478 EmitOperand(dst, src); 479} 480 481 482void X86Assembler::mulsd(XmmRegister dst, XmmRegister src) { 483 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 484 EmitUint8(0xF2); 485 EmitUint8(0x0F); 486 EmitUint8(0x59); 487 EmitXmmRegisterOperand(dst, src); 488} 489 490 491void X86Assembler::mulsd(XmmRegister dst, const Address& src) { 492 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 493 EmitUint8(0xF2); 494 EmitUint8(0x0F); 495 EmitUint8(0x59); 496 EmitOperand(dst, src); 497} 498 499 500void X86Assembler::divsd(XmmRegister dst, XmmRegister src) { 501 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 502 EmitUint8(0xF2); 503 EmitUint8(0x0F); 504 EmitUint8(0x5E); 505 EmitXmmRegisterOperand(dst, src); 506} 507 508 509void X86Assembler::divsd(XmmRegister dst, const Address& src) { 510 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 511 EmitUint8(0xF2); 512 EmitUint8(0x0F); 513 EmitUint8(0x5E); 514 EmitOperand(dst, src); 515} 516 517 518void X86Assembler::cvtsi2ss(XmmRegister dst, Register src) { 519 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 520 EmitUint8(0xF3); 521 EmitUint8(0x0F); 522 EmitUint8(0x2A); 523 EmitOperand(dst, Operand(src)); 524} 525 526 527void X86Assembler::cvtsi2sd(XmmRegister dst, Register src) { 528 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 529 EmitUint8(0xF2); 530 EmitUint8(0x0F); 531 EmitUint8(0x2A); 532 EmitOperand(dst, Operand(src)); 533} 534 
535 536void X86Assembler::cvtss2si(Register dst, XmmRegister src) { 537 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 538 EmitUint8(0xF3); 539 EmitUint8(0x0F); 540 EmitUint8(0x2D); 541 EmitXmmRegisterOperand(dst, src); 542} 543 544 545void X86Assembler::cvtss2sd(XmmRegister dst, XmmRegister src) { 546 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 547 EmitUint8(0xF3); 548 EmitUint8(0x0F); 549 EmitUint8(0x5A); 550 EmitXmmRegisterOperand(dst, src); 551} 552 553 554void X86Assembler::cvtsd2si(Register dst, XmmRegister src) { 555 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 556 EmitUint8(0xF2); 557 EmitUint8(0x0F); 558 EmitUint8(0x2D); 559 EmitXmmRegisterOperand(dst, src); 560} 561 562 563void X86Assembler::cvttss2si(Register dst, XmmRegister src) { 564 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 565 EmitUint8(0xF3); 566 EmitUint8(0x0F); 567 EmitUint8(0x2C); 568 EmitXmmRegisterOperand(dst, src); 569} 570 571 572void X86Assembler::cvttsd2si(Register dst, XmmRegister src) { 573 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 574 EmitUint8(0xF2); 575 EmitUint8(0x0F); 576 EmitUint8(0x2C); 577 EmitXmmRegisterOperand(dst, src); 578} 579 580 581void X86Assembler::cvtsd2ss(XmmRegister dst, XmmRegister src) { 582 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 583 EmitUint8(0xF2); 584 EmitUint8(0x0F); 585 EmitUint8(0x5A); 586 EmitXmmRegisterOperand(dst, src); 587} 588 589 590void X86Assembler::cvtdq2pd(XmmRegister dst, XmmRegister src) { 591 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 592 EmitUint8(0xF3); 593 EmitUint8(0x0F); 594 EmitUint8(0xE6); 595 EmitXmmRegisterOperand(dst, src); 596} 597 598 599void X86Assembler::comiss(XmmRegister a, XmmRegister b) { 600 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 601 EmitUint8(0x0F); 602 EmitUint8(0x2F); 603 EmitXmmRegisterOperand(a, b); 604} 605 606 607void X86Assembler::comisd(XmmRegister a, XmmRegister b) { 608 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 609 EmitUint8(0x66); 610 
EmitUint8(0x0F); 611 EmitUint8(0x2F); 612 EmitXmmRegisterOperand(a, b); 613} 614 615 616void X86Assembler::sqrtsd(XmmRegister dst, XmmRegister src) { 617 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 618 EmitUint8(0xF2); 619 EmitUint8(0x0F); 620 EmitUint8(0x51); 621 EmitXmmRegisterOperand(dst, src); 622} 623 624 625void X86Assembler::sqrtss(XmmRegister dst, XmmRegister src) { 626 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 627 EmitUint8(0xF3); 628 EmitUint8(0x0F); 629 EmitUint8(0x51); 630 EmitXmmRegisterOperand(dst, src); 631} 632 633 634void X86Assembler::xorpd(XmmRegister dst, const Address& src) { 635 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 636 EmitUint8(0x66); 637 EmitUint8(0x0F); 638 EmitUint8(0x57); 639 EmitOperand(dst, src); 640} 641 642 643void X86Assembler::xorpd(XmmRegister dst, XmmRegister src) { 644 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 645 EmitUint8(0x66); 646 EmitUint8(0x0F); 647 EmitUint8(0x57); 648 EmitXmmRegisterOperand(dst, src); 649} 650 651 652void X86Assembler::xorps(XmmRegister dst, const Address& src) { 653 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 654 EmitUint8(0x0F); 655 EmitUint8(0x57); 656 EmitOperand(dst, src); 657} 658 659 660void X86Assembler::xorps(XmmRegister dst, XmmRegister src) { 661 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 662 EmitUint8(0x0F); 663 EmitUint8(0x57); 664 EmitXmmRegisterOperand(dst, src); 665} 666 667 668void X86Assembler::andpd(XmmRegister dst, const Address& src) { 669 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 670 EmitUint8(0x66); 671 EmitUint8(0x0F); 672 EmitUint8(0x54); 673 EmitOperand(dst, src); 674} 675 676 677void X86Assembler::fldl(const Address& src) { 678 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 679 EmitUint8(0xDD); 680 EmitOperand(0, src); 681} 682 683 684void X86Assembler::fstpl(const Address& dst) { 685 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 686 EmitUint8(0xDD); 687 EmitOperand(3, dst); 688} 689 690 691void 
X86Assembler::fnstcw(const Address& dst) { 692 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 693 EmitUint8(0xD9); 694 EmitOperand(7, dst); 695} 696 697 698void X86Assembler::fldcw(const Address& src) { 699 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 700 EmitUint8(0xD9); 701 EmitOperand(5, src); 702} 703 704 705void X86Assembler::fistpl(const Address& dst) { 706 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 707 EmitUint8(0xDF); 708 EmitOperand(7, dst); 709} 710 711 712void X86Assembler::fistps(const Address& dst) { 713 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 714 EmitUint8(0xDB); 715 EmitOperand(3, dst); 716} 717 718 719void X86Assembler::fildl(const Address& src) { 720 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 721 EmitUint8(0xDF); 722 EmitOperand(5, src); 723} 724 725 726void X86Assembler::fincstp() { 727 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 728 EmitUint8(0xD9); 729 EmitUint8(0xF7); 730} 731 732 733void X86Assembler::ffree(const Immediate& index) { 734 CHECK_LT(index.value(), 7); 735 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 736 EmitUint8(0xDD); 737 EmitUint8(0xC0 + index.value()); 738} 739 740 741void X86Assembler::fsin() { 742 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 743 EmitUint8(0xD9); 744 EmitUint8(0xFE); 745} 746 747 748void X86Assembler::fcos() { 749 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 750 EmitUint8(0xD9); 751 EmitUint8(0xFF); 752} 753 754 755void X86Assembler::fptan() { 756 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 757 EmitUint8(0xD9); 758 EmitUint8(0xF2); 759} 760 761 762void X86Assembler::xchgl(Register dst, Register src) { 763 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 764 EmitUint8(0x87); 765 EmitRegisterOperand(dst, src); 766} 767 768 769void X86Assembler::xchgl(Register reg, const Address& address) { 770 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 771 EmitUint8(0x87); 772 EmitOperand(reg, address); 773} 774 775 776void X86Assembler::cmpw(const 
Address& address, const Immediate& imm) { 777 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 778 EmitUint8(0x66); 779 EmitComplex(7, address, imm); 780} 781 782 783void X86Assembler::cmpl(Register reg, const Immediate& imm) { 784 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 785 EmitComplex(7, Operand(reg), imm); 786} 787 788 789void X86Assembler::cmpl(Register reg0, Register reg1) { 790 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 791 EmitUint8(0x3B); 792 EmitOperand(reg0, Operand(reg1)); 793} 794 795 796void X86Assembler::cmpl(Register reg, const Address& address) { 797 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 798 EmitUint8(0x3B); 799 EmitOperand(reg, address); 800} 801 802 803void X86Assembler::addl(Register dst, Register src) { 804 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 805 EmitUint8(0x03); 806 EmitRegisterOperand(dst, src); 807} 808 809 810void X86Assembler::addl(Register reg, const Address& address) { 811 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 812 EmitUint8(0x03); 813 EmitOperand(reg, address); 814} 815 816 817void X86Assembler::cmpl(const Address& address, Register reg) { 818 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 819 EmitUint8(0x39); 820 EmitOperand(reg, address); 821} 822 823 824void X86Assembler::cmpl(const Address& address, const Immediate& imm) { 825 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 826 EmitComplex(7, address, imm); 827} 828 829 830void X86Assembler::testl(Register reg1, Register reg2) { 831 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 832 EmitUint8(0x85); 833 EmitRegisterOperand(reg1, reg2); 834} 835 836 837void X86Assembler::testl(Register reg, const Address& address) { 838 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 839 EmitUint8(0x85); 840 EmitOperand(reg, address); 841} 842 843 844void X86Assembler::testl(Register reg, const Immediate& immediate) { 845 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 846 // For registers that have a byte variant (EAX, EBX, 
ECX, and EDX) 847 // we only test the byte register to keep the encoding short. 848 if (immediate.is_uint8() && reg < 4) { 849 // Use zero-extended 8-bit immediate. 850 if (reg == EAX) { 851 EmitUint8(0xA8); 852 } else { 853 EmitUint8(0xF6); 854 EmitUint8(0xC0 + reg); 855 } 856 EmitUint8(immediate.value() & 0xFF); 857 } else if (reg == EAX) { 858 // Use short form if the destination is EAX. 859 EmitUint8(0xA9); 860 EmitImmediate(immediate); 861 } else { 862 EmitUint8(0xF7); 863 EmitOperand(0, Operand(reg)); 864 EmitImmediate(immediate); 865 } 866} 867 868 869void X86Assembler::andl(Register dst, Register src) { 870 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 871 EmitUint8(0x23); 872 EmitOperand(dst, Operand(src)); 873} 874 875 876void X86Assembler::andl(Register reg, const Address& address) { 877 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 878 EmitUint8(0x23); 879 EmitOperand(reg, address); 880} 881 882 883void X86Assembler::andl(Register dst, const Immediate& imm) { 884 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 885 EmitComplex(4, Operand(dst), imm); 886} 887 888 889void X86Assembler::orl(Register dst, Register src) { 890 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 891 EmitUint8(0x0B); 892 EmitOperand(dst, Operand(src)); 893} 894 895 896void X86Assembler::orl(Register reg, const Address& address) { 897 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 898 EmitUint8(0x0B); 899 EmitOperand(reg, address); 900} 901 902 903void X86Assembler::orl(Register dst, const Immediate& imm) { 904 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 905 EmitComplex(1, Operand(dst), imm); 906} 907 908 909void X86Assembler::xorl(Register dst, Register src) { 910 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 911 EmitUint8(0x33); 912 EmitOperand(dst, Operand(src)); 913} 914 915 916void X86Assembler::xorl(Register reg, const Address& address) { 917 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 918 EmitUint8(0x33); 919 EmitOperand(reg, address); 
920} 921 922 923void X86Assembler::xorl(Register dst, const Immediate& imm) { 924 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 925 EmitComplex(6, Operand(dst), imm); 926} 927 928 929void X86Assembler::addl(Register reg, const Immediate& imm) { 930 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 931 EmitComplex(0, Operand(reg), imm); 932} 933 934 935void X86Assembler::addl(const Address& address, Register reg) { 936 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 937 EmitUint8(0x01); 938 EmitOperand(reg, address); 939} 940 941 942void X86Assembler::addl(const Address& address, const Immediate& imm) { 943 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 944 EmitComplex(0, address, imm); 945} 946 947 948void X86Assembler::adcl(Register reg, const Immediate& imm) { 949 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 950 EmitComplex(2, Operand(reg), imm); 951} 952 953 954void X86Assembler::adcl(Register dst, Register src) { 955 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 956 EmitUint8(0x13); 957 EmitOperand(dst, Operand(src)); 958} 959 960 961void X86Assembler::adcl(Register dst, const Address& address) { 962 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 963 EmitUint8(0x13); 964 EmitOperand(dst, address); 965} 966 967 968void X86Assembler::subl(Register dst, Register src) { 969 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 970 EmitUint8(0x2B); 971 EmitOperand(dst, Operand(src)); 972} 973 974 975void X86Assembler::subl(Register reg, const Immediate& imm) { 976 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 977 EmitComplex(5, Operand(reg), imm); 978} 979 980 981void X86Assembler::subl(Register reg, const Address& address) { 982 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 983 EmitUint8(0x2B); 984 EmitOperand(reg, address); 985} 986 987 988void X86Assembler::cdq() { 989 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 990 EmitUint8(0x99); 991} 992 993 994void X86Assembler::idivl(Register reg) { 995 
AssemblerBuffer::EnsureCapacity ensured(&buffer_); 996 EmitUint8(0xF7); 997 EmitUint8(0xF8 | reg); 998} 999 1000 1001void X86Assembler::imull(Register dst, Register src) { 1002 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1003 EmitUint8(0x0F); 1004 EmitUint8(0xAF); 1005 EmitOperand(dst, Operand(src)); 1006} 1007 1008 1009void X86Assembler::imull(Register reg, const Immediate& imm) { 1010 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1011 EmitUint8(0x69); 1012 EmitOperand(reg, Operand(reg)); 1013 EmitImmediate(imm); 1014} 1015 1016 1017void X86Assembler::imull(Register reg, const Address& address) { 1018 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1019 EmitUint8(0x0F); 1020 EmitUint8(0xAF); 1021 EmitOperand(reg, address); 1022} 1023 1024 1025void X86Assembler::imull(Register reg) { 1026 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1027 EmitUint8(0xF7); 1028 EmitOperand(5, Operand(reg)); 1029} 1030 1031 1032void X86Assembler::imull(const Address& address) { 1033 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1034 EmitUint8(0xF7); 1035 EmitOperand(5, address); 1036} 1037 1038 1039void X86Assembler::mull(Register reg) { 1040 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1041 EmitUint8(0xF7); 1042 EmitOperand(4, Operand(reg)); 1043} 1044 1045 1046void X86Assembler::mull(const Address& address) { 1047 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1048 EmitUint8(0xF7); 1049 EmitOperand(4, address); 1050} 1051 1052 1053void X86Assembler::sbbl(Register dst, Register src) { 1054 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1055 EmitUint8(0x1B); 1056 EmitOperand(dst, Operand(src)); 1057} 1058 1059 1060void X86Assembler::sbbl(Register reg, const Immediate& imm) { 1061 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1062 EmitComplex(3, Operand(reg), imm); 1063} 1064 1065 1066void X86Assembler::sbbl(Register dst, const Address& address) { 1067 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1068 EmitUint8(0x1B); 1069 
EmitOperand(dst, address); 1070} 1071 1072 1073void X86Assembler::incl(Register reg) { 1074 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1075 EmitUint8(0x40 + reg); 1076} 1077 1078 1079void X86Assembler::incl(const Address& address) { 1080 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1081 EmitUint8(0xFF); 1082 EmitOperand(0, address); 1083} 1084 1085 1086void X86Assembler::decl(Register reg) { 1087 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1088 EmitUint8(0x48 + reg); 1089} 1090 1091 1092void X86Assembler::decl(const Address& address) { 1093 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1094 EmitUint8(0xFF); 1095 EmitOperand(1, address); 1096} 1097 1098 1099void X86Assembler::shll(Register reg, const Immediate& imm) { 1100 EmitGenericShift(4, reg, imm); 1101} 1102 1103 1104void X86Assembler::shll(Register operand, Register shifter) { 1105 EmitGenericShift(4, operand, shifter); 1106} 1107 1108 1109void X86Assembler::shrl(Register reg, const Immediate& imm) { 1110 EmitGenericShift(5, reg, imm); 1111} 1112 1113 1114void X86Assembler::shrl(Register operand, Register shifter) { 1115 EmitGenericShift(5, operand, shifter); 1116} 1117 1118 1119void X86Assembler::sarl(Register reg, const Immediate& imm) { 1120 EmitGenericShift(7, reg, imm); 1121} 1122 1123 1124void X86Assembler::sarl(Register operand, Register shifter) { 1125 EmitGenericShift(7, operand, shifter); 1126} 1127 1128 1129void X86Assembler::shld(Register dst, Register src, Register shifter) { 1130 DCHECK_EQ(ECX, shifter); 1131 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1132 EmitUint8(0x0F); 1133 EmitUint8(0xA5); 1134 EmitRegisterOperand(src, dst); 1135} 1136 1137 1138void X86Assembler::shrd(Register dst, Register src, Register shifter) { 1139 DCHECK_EQ(ECX, shifter); 1140 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1141 EmitUint8(0x0F); 1142 EmitUint8(0xAD); 1143 EmitRegisterOperand(src, dst); 1144} 1145 1146 1147void X86Assembler::negl(Register reg) { 1148 
AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1149 EmitUint8(0xF7); 1150 EmitOperand(3, Operand(reg)); 1151} 1152 1153 1154void X86Assembler::notl(Register reg) { 1155 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1156 EmitUint8(0xF7); 1157 EmitUint8(0xD0 | reg); 1158} 1159 1160 1161void X86Assembler::enter(const Immediate& imm) { 1162 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1163 EmitUint8(0xC8); 1164 CHECK(imm.is_uint16()); 1165 EmitUint8(imm.value() & 0xFF); 1166 EmitUint8((imm.value() >> 8) & 0xFF); 1167 EmitUint8(0x00); 1168} 1169 1170 1171void X86Assembler::leave() { 1172 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1173 EmitUint8(0xC9); 1174} 1175 1176 1177void X86Assembler::ret() { 1178 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1179 EmitUint8(0xC3); 1180} 1181 1182 1183void X86Assembler::ret(const Immediate& imm) { 1184 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1185 EmitUint8(0xC2); 1186 CHECK(imm.is_uint16()); 1187 EmitUint8(imm.value() & 0xFF); 1188 EmitUint8((imm.value() >> 8) & 0xFF); 1189} 1190 1191 1192 1193void X86Assembler::nop() { 1194 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1195 EmitUint8(0x90); 1196} 1197 1198 1199void X86Assembler::int3() { 1200 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1201 EmitUint8(0xCC); 1202} 1203 1204 1205void X86Assembler::hlt() { 1206 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1207 EmitUint8(0xF4); 1208} 1209 1210 1211void X86Assembler::j(Condition condition, Label* label) { 1212 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1213 if (label->IsBound()) { 1214 static const int kShortSize = 2; 1215 static const int kLongSize = 6; 1216 int offset = label->Position() - buffer_.Size(); 1217 CHECK_LE(offset, 0); 1218 if (IsInt(8, offset - kShortSize)) { 1219 EmitUint8(0x70 + condition); 1220 EmitUint8((offset - kShortSize) & 0xFF); 1221 } else { 1222 EmitUint8(0x0F); 1223 EmitUint8(0x80 + condition); 1224 EmitInt32(offset - kLongSize); 1225 } 1226 } 
else { 1227 EmitUint8(0x0F); 1228 EmitUint8(0x80 + condition); 1229 EmitLabelLink(label); 1230 } 1231} 1232 1233 1234void X86Assembler::jmp(Register reg) { 1235 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1236 EmitUint8(0xFF); 1237 EmitRegisterOperand(4, reg); 1238} 1239 1240void X86Assembler::jmp(const Address& address) { 1241 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1242 EmitUint8(0xFF); 1243 EmitOperand(4, address); 1244} 1245 1246void X86Assembler::jmp(Label* label) { 1247 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1248 if (label->IsBound()) { 1249 static const int kShortSize = 2; 1250 static const int kLongSize = 5; 1251 int offset = label->Position() - buffer_.Size(); 1252 CHECK_LE(offset, 0); 1253 if (IsInt(8, offset - kShortSize)) { 1254 EmitUint8(0xEB); 1255 EmitUint8((offset - kShortSize) & 0xFF); 1256 } else { 1257 EmitUint8(0xE9); 1258 EmitInt32(offset - kLongSize); 1259 } 1260 } else { 1261 EmitUint8(0xE9); 1262 EmitLabelLink(label); 1263 } 1264} 1265 1266 1267X86Assembler* X86Assembler::lock() { 1268 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1269 EmitUint8(0xF0); 1270 return this; 1271} 1272 1273 1274void X86Assembler::cmpxchgl(const Address& address, Register reg) { 1275 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1276 EmitUint8(0x0F); 1277 EmitUint8(0xB1); 1278 EmitOperand(reg, address); 1279} 1280 1281void X86Assembler::mfence() { 1282 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1283 EmitUint8(0x0F); 1284 EmitUint8(0xAE); 1285 EmitUint8(0xF0); 1286} 1287 1288X86Assembler* X86Assembler::fs() { 1289 // TODO: fs is a prefix and not an instruction 1290 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1291 EmitUint8(0x64); 1292 return this; 1293} 1294 1295X86Assembler* X86Assembler::gs() { 1296 // TODO: fs is a prefix and not an instruction 1297 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1298 EmitUint8(0x65); 1299 return this; 1300} 1301 1302void X86Assembler::AddImmediate(Register reg, const 
Immediate& imm) { 1303 int value = imm.value(); 1304 if (value > 0) { 1305 if (value == 1) { 1306 incl(reg); 1307 } else if (value != 0) { 1308 addl(reg, imm); 1309 } 1310 } else if (value < 0) { 1311 value = -value; 1312 if (value == 1) { 1313 decl(reg); 1314 } else if (value != 0) { 1315 subl(reg, Immediate(value)); 1316 } 1317 } 1318} 1319 1320 1321void X86Assembler::LoadLongConstant(XmmRegister dst, int64_t value) { 1322 // TODO: Need to have a code constants table. 1323 pushl(Immediate(High32Bits(value))); 1324 pushl(Immediate(Low32Bits(value))); 1325 movsd(dst, Address(ESP, 0)); 1326 addl(ESP, Immediate(2 * sizeof(int32_t))); 1327} 1328 1329 1330void X86Assembler::LoadDoubleConstant(XmmRegister dst, double value) { 1331 // TODO: Need to have a code constants table. 1332 int64_t constant = bit_cast<int64_t, double>(value); 1333 LoadLongConstant(dst, constant); 1334} 1335 1336 1337void X86Assembler::FloatNegate(XmmRegister f) { 1338 static const struct { 1339 uint32_t a; 1340 uint32_t b; 1341 uint32_t c; 1342 uint32_t d; 1343 } float_negate_constant __attribute__((aligned(16))) = 1344 { 0x80000000, 0x00000000, 0x80000000, 0x00000000 }; 1345 xorps(f, Address::Absolute(reinterpret_cast<uintptr_t>(&float_negate_constant))); 1346} 1347 1348 1349void X86Assembler::DoubleNegate(XmmRegister d) { 1350 static const struct { 1351 uint64_t a; 1352 uint64_t b; 1353 } double_negate_constant __attribute__((aligned(16))) = 1354 {0x8000000000000000LL, 0x8000000000000000LL}; 1355 xorpd(d, Address::Absolute(reinterpret_cast<uintptr_t>(&double_negate_constant))); 1356} 1357 1358 1359void X86Assembler::DoubleAbs(XmmRegister reg) { 1360 static const struct { 1361 uint64_t a; 1362 uint64_t b; 1363 } double_abs_constant __attribute__((aligned(16))) = 1364 {0x7FFFFFFFFFFFFFFFLL, 0x7FFFFFFFFFFFFFFFLL}; 1365 andpd(reg, Address::Absolute(reinterpret_cast<uintptr_t>(&double_abs_constant))); 1366} 1367 1368 1369void X86Assembler::Align(int alignment, int offset) { 1370 
CHECK(IsPowerOfTwo(alignment)); 1371 // Emit nop instruction until the real position is aligned. 1372 while (((offset + buffer_.GetPosition()) & (alignment-1)) != 0) { 1373 nop(); 1374 } 1375} 1376 1377 1378void X86Assembler::Bind(Label* label) { 1379 int bound = buffer_.Size(); 1380 CHECK(!label->IsBound()); // Labels can only be bound once. 1381 while (label->IsLinked()) { 1382 int position = label->LinkPosition(); 1383 int next = buffer_.Load<int32_t>(position); 1384 buffer_.Store<int32_t>(position, bound - (position + 4)); 1385 label->position_ = next; 1386 } 1387 label->BindTo(bound); 1388} 1389 1390 1391void X86Assembler::EmitOperand(int reg_or_opcode, const Operand& operand) { 1392 CHECK_GE(reg_or_opcode, 0); 1393 CHECK_LT(reg_or_opcode, 8); 1394 const int length = operand.length_; 1395 CHECK_GT(length, 0); 1396 // Emit the ModRM byte updated with the given reg value. 1397 CHECK_EQ(operand.encoding_[0] & 0x38, 0); 1398 EmitUint8(operand.encoding_[0] + (reg_or_opcode << 3)); 1399 // Emit the rest of the encoded operand. 1400 for (int i = 1; i < length; i++) { 1401 EmitUint8(operand.encoding_[i]); 1402 } 1403} 1404 1405 1406void X86Assembler::EmitImmediate(const Immediate& imm) { 1407 EmitInt32(imm.value()); 1408} 1409 1410 1411void X86Assembler::EmitComplex(int reg_or_opcode, 1412 const Operand& operand, 1413 const Immediate& immediate) { 1414 CHECK_GE(reg_or_opcode, 0); 1415 CHECK_LT(reg_or_opcode, 8); 1416 if (immediate.is_int8()) { 1417 // Use sign-extended 8-bit immediate. 1418 EmitUint8(0x83); 1419 EmitOperand(reg_or_opcode, operand); 1420 EmitUint8(immediate.value() & 0xFF); 1421 } else if (operand.IsRegister(EAX)) { 1422 // Use short form if the destination is eax. 
1423 EmitUint8(0x05 + (reg_or_opcode << 3)); 1424 EmitImmediate(immediate); 1425 } else { 1426 EmitUint8(0x81); 1427 EmitOperand(reg_or_opcode, operand); 1428 EmitImmediate(immediate); 1429 } 1430} 1431 1432 1433void X86Assembler::EmitLabel(Label* label, int instruction_size) { 1434 if (label->IsBound()) { 1435 int offset = label->Position() - buffer_.Size(); 1436 CHECK_LE(offset, 0); 1437 EmitInt32(offset - instruction_size); 1438 } else { 1439 EmitLabelLink(label); 1440 } 1441} 1442 1443 1444void X86Assembler::EmitLabelLink(Label* label) { 1445 CHECK(!label->IsBound()); 1446 int position = buffer_.Size(); 1447 EmitInt32(label->position_); 1448 label->LinkTo(position); 1449} 1450 1451 1452void X86Assembler::EmitGenericShift(int reg_or_opcode, 1453 Register reg, 1454 const Immediate& imm) { 1455 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1456 CHECK(imm.is_int8()); 1457 if (imm.value() == 1) { 1458 EmitUint8(0xD1); 1459 EmitOperand(reg_or_opcode, Operand(reg)); 1460 } else { 1461 EmitUint8(0xC1); 1462 EmitOperand(reg_or_opcode, Operand(reg)); 1463 EmitUint8(imm.value() & 0xFF); 1464 } 1465} 1466 1467 1468void X86Assembler::EmitGenericShift(int reg_or_opcode, 1469 Register operand, 1470 Register shifter) { 1471 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 1472 CHECK_EQ(shifter, ECX); 1473 EmitUint8(0xD3); 1474 EmitOperand(reg_or_opcode, Operand(operand)); 1475} 1476 1477void X86Assembler::InitializeFrameDescriptionEntry() { 1478 WriteFDEHeader(&cfi_info_, false /* is_64bit */); 1479} 1480 1481void X86Assembler::FinalizeFrameDescriptionEntry() { 1482 WriteFDEAddressRange(&cfi_info_, buffer_.Size(), false /* is_64bit */); 1483 PadCFI(&cfi_info_); 1484 WriteCFILength(&cfi_info_, false /* is_64bit */); 1485} 1486 1487constexpr size_t kFramePointerSize = 4; 1488 1489void X86Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg, 1490 const std::vector<ManagedRegister>& spill_regs, 1491 const ManagedRegisterEntrySpills& entry_spills) { 1492 
cfi_cfa_offset_ = kFramePointerSize; // Only return address on stack 1493 cfi_pc_ = buffer_.Size(); // Nothing emitted yet 1494 DCHECK_EQ(cfi_pc_, 0U); 1495 1496 uint32_t reg_offset = 1; 1497 CHECK_ALIGNED(frame_size, kStackAlignment); 1498 for (int i = spill_regs.size() - 1; i >= 0; --i) { 1499 pushl(spill_regs.at(i).AsX86().AsCpuRegister()); 1500 1501 // DW_CFA_advance_loc 1502 DW_CFA_advance_loc(&cfi_info_, buffer_.Size() - cfi_pc_); 1503 cfi_pc_ = buffer_.Size(); 1504 // DW_CFA_def_cfa_offset 1505 cfi_cfa_offset_ += kFramePointerSize; 1506 DW_CFA_def_cfa_offset(&cfi_info_, cfi_cfa_offset_); 1507 // DW_CFA_offset reg offset 1508 reg_offset++; 1509 DW_CFA_offset(&cfi_info_, spill_regs.at(i).AsX86().DWARFRegId(), reg_offset); 1510 } 1511 1512 // return address then method on stack 1513 int32_t adjust = frame_size - (spill_regs.size() * kFramePointerSize) - 1514 sizeof(StackReference<mirror::ArtMethod>) /*method*/ - 1515 kFramePointerSize /*return address*/; 1516 addl(ESP, Immediate(-adjust)); 1517 // DW_CFA_advance_loc 1518 DW_CFA_advance_loc(&cfi_info_, buffer_.Size() - cfi_pc_); 1519 cfi_pc_ = buffer_.Size(); 1520 // DW_CFA_def_cfa_offset 1521 cfi_cfa_offset_ += adjust; 1522 DW_CFA_def_cfa_offset(&cfi_info_, cfi_cfa_offset_); 1523 1524 pushl(method_reg.AsX86().AsCpuRegister()); 1525 // DW_CFA_advance_loc 1526 DW_CFA_advance_loc(&cfi_info_, buffer_.Size() - cfi_pc_); 1527 cfi_pc_ = buffer_.Size(); 1528 // DW_CFA_def_cfa_offset 1529 cfi_cfa_offset_ += kFramePointerSize; 1530 DW_CFA_def_cfa_offset(&cfi_info_, cfi_cfa_offset_); 1531 1532 for (size_t i = 0; i < entry_spills.size(); ++i) { 1533 movl(Address(ESP, frame_size + sizeof(StackReference<mirror::ArtMethod>) + 1534 (i * kFramePointerSize)), 1535 entry_spills.at(i).AsX86().AsCpuRegister()); 1536 } 1537} 1538 1539void X86Assembler::RemoveFrame(size_t frame_size, 1540 const std::vector<ManagedRegister>& spill_regs) { 1541 CHECK_ALIGNED(frame_size, kStackAlignment); 1542 addl(ESP, Immediate(frame_size - 
(spill_regs.size() * kFramePointerSize) - 1543 sizeof(StackReference<mirror::ArtMethod>))); 1544 for (size_t i = 0; i < spill_regs.size(); ++i) { 1545 popl(spill_regs.at(i).AsX86().AsCpuRegister()); 1546 } 1547 ret(); 1548} 1549 1550void X86Assembler::IncreaseFrameSize(size_t adjust) { 1551 CHECK_ALIGNED(adjust, kStackAlignment); 1552 addl(ESP, Immediate(-adjust)); 1553 // DW_CFA_advance_loc 1554 DW_CFA_advance_loc(&cfi_info_, buffer_.Size() - cfi_pc_); 1555 cfi_pc_ = buffer_.Size(); 1556 // DW_CFA_def_cfa_offset 1557 cfi_cfa_offset_ += adjust; 1558 DW_CFA_def_cfa_offset(&cfi_info_, cfi_cfa_offset_); 1559} 1560 1561void X86Assembler::DecreaseFrameSize(size_t adjust) { 1562 CHECK_ALIGNED(adjust, kStackAlignment); 1563 addl(ESP, Immediate(adjust)); 1564} 1565 1566void X86Assembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) { 1567 X86ManagedRegister src = msrc.AsX86(); 1568 if (src.IsNoRegister()) { 1569 CHECK_EQ(0u, size); 1570 } else if (src.IsCpuRegister()) { 1571 CHECK_EQ(4u, size); 1572 movl(Address(ESP, offs), src.AsCpuRegister()); 1573 } else if (src.IsRegisterPair()) { 1574 CHECK_EQ(8u, size); 1575 movl(Address(ESP, offs), src.AsRegisterPairLow()); 1576 movl(Address(ESP, FrameOffset(offs.Int32Value()+4)), 1577 src.AsRegisterPairHigh()); 1578 } else if (src.IsX87Register()) { 1579 if (size == 4) { 1580 fstps(Address(ESP, offs)); 1581 } else { 1582 fstpl(Address(ESP, offs)); 1583 } 1584 } else { 1585 CHECK(src.IsXmmRegister()); 1586 if (size == 4) { 1587 movss(Address(ESP, offs), src.AsXmmRegister()); 1588 } else { 1589 movsd(Address(ESP, offs), src.AsXmmRegister()); 1590 } 1591 } 1592} 1593 1594void X86Assembler::StoreRef(FrameOffset dest, ManagedRegister msrc) { 1595 X86ManagedRegister src = msrc.AsX86(); 1596 CHECK(src.IsCpuRegister()); 1597 movl(Address(ESP, dest), src.AsCpuRegister()); 1598} 1599 1600void X86Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) { 1601 X86ManagedRegister src = msrc.AsX86(); 1602 
CHECK(src.IsCpuRegister()); 1603 movl(Address(ESP, dest), src.AsCpuRegister()); 1604} 1605 1606void X86Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm, 1607 ManagedRegister) { 1608 movl(Address(ESP, dest), Immediate(imm)); 1609} 1610 1611void X86Assembler::StoreImmediateToThread32(ThreadOffset<4> dest, uint32_t imm, 1612 ManagedRegister) { 1613 fs()->movl(Address::Absolute(dest), Immediate(imm)); 1614} 1615 1616void X86Assembler::StoreStackOffsetToThread32(ThreadOffset<4> thr_offs, 1617 FrameOffset fr_offs, 1618 ManagedRegister mscratch) { 1619 X86ManagedRegister scratch = mscratch.AsX86(); 1620 CHECK(scratch.IsCpuRegister()); 1621 leal(scratch.AsCpuRegister(), Address(ESP, fr_offs)); 1622 fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister()); 1623} 1624 1625void X86Assembler::StoreStackPointerToThread32(ThreadOffset<4> thr_offs) { 1626 fs()->movl(Address::Absolute(thr_offs), ESP); 1627} 1628 1629void X86Assembler::StoreSpanning(FrameOffset /*dst*/, ManagedRegister /*src*/, 1630 FrameOffset /*in_off*/, ManagedRegister /*scratch*/) { 1631 UNIMPLEMENTED(FATAL); // this case only currently exists for ARM 1632} 1633 1634void X86Assembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) { 1635 X86ManagedRegister dest = mdest.AsX86(); 1636 if (dest.IsNoRegister()) { 1637 CHECK_EQ(0u, size); 1638 } else if (dest.IsCpuRegister()) { 1639 CHECK_EQ(4u, size); 1640 movl(dest.AsCpuRegister(), Address(ESP, src)); 1641 } else if (dest.IsRegisterPair()) { 1642 CHECK_EQ(8u, size); 1643 movl(dest.AsRegisterPairLow(), Address(ESP, src)); 1644 movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value()+4))); 1645 } else if (dest.IsX87Register()) { 1646 if (size == 4) { 1647 flds(Address(ESP, src)); 1648 } else { 1649 fldl(Address(ESP, src)); 1650 } 1651 } else { 1652 CHECK(dest.IsXmmRegister()); 1653 if (size == 4) { 1654 movss(dest.AsXmmRegister(), Address(ESP, src)); 1655 } else { 1656 movsd(dest.AsXmmRegister(), Address(ESP, src)); 
1657 } 1658 } 1659} 1660 1661void X86Assembler::LoadFromThread32(ManagedRegister mdest, ThreadOffset<4> src, size_t size) { 1662 X86ManagedRegister dest = mdest.AsX86(); 1663 if (dest.IsNoRegister()) { 1664 CHECK_EQ(0u, size); 1665 } else if (dest.IsCpuRegister()) { 1666 CHECK_EQ(4u, size); 1667 fs()->movl(dest.AsCpuRegister(), Address::Absolute(src)); 1668 } else if (dest.IsRegisterPair()) { 1669 CHECK_EQ(8u, size); 1670 fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src)); 1671 fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset<4>(src.Int32Value()+4))); 1672 } else if (dest.IsX87Register()) { 1673 if (size == 4) { 1674 fs()->flds(Address::Absolute(src)); 1675 } else { 1676 fs()->fldl(Address::Absolute(src)); 1677 } 1678 } else { 1679 CHECK(dest.IsXmmRegister()); 1680 if (size == 4) { 1681 fs()->movss(dest.AsXmmRegister(), Address::Absolute(src)); 1682 } else { 1683 fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src)); 1684 } 1685 } 1686} 1687 1688void X86Assembler::LoadRef(ManagedRegister mdest, FrameOffset src) { 1689 X86ManagedRegister dest = mdest.AsX86(); 1690 CHECK(dest.IsCpuRegister()); 1691 movl(dest.AsCpuRegister(), Address(ESP, src)); 1692} 1693 1694void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, 1695 MemberOffset offs) { 1696 X86ManagedRegister dest = mdest.AsX86(); 1697 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister()); 1698 movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs)); 1699 if (kPoisonHeapReferences) { 1700 negl(dest.AsCpuRegister()); 1701 } 1702} 1703 1704void X86Assembler::LoadRawPtr(ManagedRegister mdest, ManagedRegister base, 1705 Offset offs) { 1706 X86ManagedRegister dest = mdest.AsX86(); 1707 CHECK(dest.IsCpuRegister() && dest.IsCpuRegister()); 1708 movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs)); 1709} 1710 1711void X86Assembler::LoadRawPtrFromThread32(ManagedRegister mdest, 1712 ThreadOffset<4> offs) { 1713 X86ManagedRegister dest = 
mdest.AsX86(); 1714 CHECK(dest.IsCpuRegister()); 1715 fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs)); 1716} 1717 1718void X86Assembler::SignExtend(ManagedRegister mreg, size_t size) { 1719 X86ManagedRegister reg = mreg.AsX86(); 1720 CHECK(size == 1 || size == 2) << size; 1721 CHECK(reg.IsCpuRegister()) << reg; 1722 if (size == 1) { 1723 movsxb(reg.AsCpuRegister(), reg.AsByteRegister()); 1724 } else { 1725 movsxw(reg.AsCpuRegister(), reg.AsCpuRegister()); 1726 } 1727} 1728 1729void X86Assembler::ZeroExtend(ManagedRegister mreg, size_t size) { 1730 X86ManagedRegister reg = mreg.AsX86(); 1731 CHECK(size == 1 || size == 2) << size; 1732 CHECK(reg.IsCpuRegister()) << reg; 1733 if (size == 1) { 1734 movzxb(reg.AsCpuRegister(), reg.AsByteRegister()); 1735 } else { 1736 movzxw(reg.AsCpuRegister(), reg.AsCpuRegister()); 1737 } 1738} 1739 1740void X86Assembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) { 1741 X86ManagedRegister dest = mdest.AsX86(); 1742 X86ManagedRegister src = msrc.AsX86(); 1743 if (!dest.Equals(src)) { 1744 if (dest.IsCpuRegister() && src.IsCpuRegister()) { 1745 movl(dest.AsCpuRegister(), src.AsCpuRegister()); 1746 } else if (src.IsX87Register() && dest.IsXmmRegister()) { 1747 // Pass via stack and pop X87 register 1748 subl(ESP, Immediate(16)); 1749 if (size == 4) { 1750 CHECK_EQ(src.AsX87Register(), ST0); 1751 fstps(Address(ESP, 0)); 1752 movss(dest.AsXmmRegister(), Address(ESP, 0)); 1753 } else { 1754 CHECK_EQ(src.AsX87Register(), ST0); 1755 fstpl(Address(ESP, 0)); 1756 movsd(dest.AsXmmRegister(), Address(ESP, 0)); 1757 } 1758 addl(ESP, Immediate(16)); 1759 } else { 1760 // TODO: x87, SSE 1761 UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src; 1762 } 1763 } 1764} 1765 1766void X86Assembler::CopyRef(FrameOffset dest, FrameOffset src, 1767 ManagedRegister mscratch) { 1768 X86ManagedRegister scratch = mscratch.AsX86(); 1769 CHECK(scratch.IsCpuRegister()); 1770 movl(scratch.AsCpuRegister(), Address(ESP, src)); 1771 
movl(Address(ESP, dest), scratch.AsCpuRegister()); 1772} 1773 1774void X86Assembler::CopyRawPtrFromThread32(FrameOffset fr_offs, 1775 ThreadOffset<4> thr_offs, 1776 ManagedRegister mscratch) { 1777 X86ManagedRegister scratch = mscratch.AsX86(); 1778 CHECK(scratch.IsCpuRegister()); 1779 fs()->movl(scratch.AsCpuRegister(), Address::Absolute(thr_offs)); 1780 Store(fr_offs, scratch, 4); 1781} 1782 1783void X86Assembler::CopyRawPtrToThread32(ThreadOffset<4> thr_offs, 1784 FrameOffset fr_offs, 1785 ManagedRegister mscratch) { 1786 X86ManagedRegister scratch = mscratch.AsX86(); 1787 CHECK(scratch.IsCpuRegister()); 1788 Load(scratch, fr_offs, 4); 1789 fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister()); 1790} 1791 1792void X86Assembler::Copy(FrameOffset dest, FrameOffset src, 1793 ManagedRegister mscratch, 1794 size_t size) { 1795 X86ManagedRegister scratch = mscratch.AsX86(); 1796 if (scratch.IsCpuRegister() && size == 8) { 1797 Load(scratch, src, 4); 1798 Store(dest, scratch, 4); 1799 Load(scratch, FrameOffset(src.Int32Value() + 4), 4); 1800 Store(FrameOffset(dest.Int32Value() + 4), scratch, 4); 1801 } else { 1802 Load(scratch, src, size); 1803 Store(dest, scratch, size); 1804 } 1805} 1806 1807void X86Assembler::Copy(FrameOffset /*dst*/, ManagedRegister /*src_base*/, Offset /*src_offset*/, 1808 ManagedRegister /*scratch*/, size_t /*size*/) { 1809 UNIMPLEMENTED(FATAL); 1810} 1811 1812void X86Assembler::Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, 1813 ManagedRegister scratch, size_t size) { 1814 CHECK(scratch.IsNoRegister()); 1815 CHECK_EQ(size, 4u); 1816 pushl(Address(ESP, src)); 1817 popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset)); 1818} 1819 1820void X86Assembler::Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, 1821 ManagedRegister mscratch, size_t size) { 1822 Register scratch = mscratch.AsX86().AsCpuRegister(); 1823 CHECK_EQ(size, 4u); 1824 movl(scratch, Address(ESP, src_base)); 1825 movl(scratch, 
Address(scratch, src_offset)); 1826 movl(Address(ESP, dest), scratch); 1827} 1828 1829void X86Assembler::Copy(ManagedRegister dest, Offset dest_offset, 1830 ManagedRegister src, Offset src_offset, 1831 ManagedRegister scratch, size_t size) { 1832 CHECK_EQ(size, 4u); 1833 CHECK(scratch.IsNoRegister()); 1834 pushl(Address(src.AsX86().AsCpuRegister(), src_offset)); 1835 popl(Address(dest.AsX86().AsCpuRegister(), dest_offset)); 1836} 1837 1838void X86Assembler::Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset, 1839 ManagedRegister mscratch, size_t size) { 1840 Register scratch = mscratch.AsX86().AsCpuRegister(); 1841 CHECK_EQ(size, 4u); 1842 CHECK_EQ(dest.Int32Value(), src.Int32Value()); 1843 movl(scratch, Address(ESP, src)); 1844 pushl(Address(scratch, src_offset)); 1845 popl(Address(scratch, dest_offset)); 1846} 1847 1848void X86Assembler::MemoryBarrier(ManagedRegister) { 1849 mfence(); 1850} 1851 1852void X86Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg, 1853 FrameOffset handle_scope_offset, 1854 ManagedRegister min_reg, bool null_allowed) { 1855 X86ManagedRegister out_reg = mout_reg.AsX86(); 1856 X86ManagedRegister in_reg = min_reg.AsX86(); 1857 CHECK(in_reg.IsCpuRegister()); 1858 CHECK(out_reg.IsCpuRegister()); 1859 VerifyObject(in_reg, null_allowed); 1860 if (null_allowed) { 1861 Label null_arg; 1862 if (!out_reg.Equals(in_reg)) { 1863 xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister()); 1864 } 1865 testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister()); 1866 j(kZero, &null_arg); 1867 leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset)); 1868 Bind(&null_arg); 1869 } else { 1870 leal(out_reg.AsCpuRegister(), Address(ESP, handle_scope_offset)); 1871 } 1872} 1873 1874void X86Assembler::CreateHandleScopeEntry(FrameOffset out_off, 1875 FrameOffset handle_scope_offset, 1876 ManagedRegister mscratch, 1877 bool null_allowed) { 1878 X86ManagedRegister scratch = mscratch.AsX86(); 1879 CHECK(scratch.IsCpuRegister()); 
1880 if (null_allowed) { 1881 Label null_arg; 1882 movl(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset)); 1883 testl(scratch.AsCpuRegister(), scratch.AsCpuRegister()); 1884 j(kZero, &null_arg); 1885 leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset)); 1886 Bind(&null_arg); 1887 } else { 1888 leal(scratch.AsCpuRegister(), Address(ESP, handle_scope_offset)); 1889 } 1890 Store(out_off, scratch, 4); 1891} 1892 1893// Given a handle scope entry, load the associated reference. 1894void X86Assembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg, 1895 ManagedRegister min_reg) { 1896 X86ManagedRegister out_reg = mout_reg.AsX86(); 1897 X86ManagedRegister in_reg = min_reg.AsX86(); 1898 CHECK(out_reg.IsCpuRegister()); 1899 CHECK(in_reg.IsCpuRegister()); 1900 Label null_arg; 1901 if (!out_reg.Equals(in_reg)) { 1902 xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister()); 1903 } 1904 testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister()); 1905 j(kZero, &null_arg); 1906 movl(out_reg.AsCpuRegister(), Address(in_reg.AsCpuRegister(), 0)); 1907 Bind(&null_arg); 1908} 1909 1910void X86Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) { 1911 // TODO: not validating references 1912} 1913 1914void X86Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) { 1915 // TODO: not validating references 1916} 1917 1918void X86Assembler::Call(ManagedRegister mbase, Offset offset, ManagedRegister) { 1919 X86ManagedRegister base = mbase.AsX86(); 1920 CHECK(base.IsCpuRegister()); 1921 call(Address(base.AsCpuRegister(), offset.Int32Value())); 1922 // TODO: place reference map on call 1923} 1924 1925void X86Assembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) { 1926 Register scratch = mscratch.AsX86().AsCpuRegister(); 1927 movl(scratch, Address(ESP, base)); 1928 call(Address(scratch, offset)); 1929} 1930 1931void X86Assembler::CallFromThread32(ThreadOffset<4> offset, ManagedRegister /*mscratch*/) { 1932 
fs()->call(Address::Absolute(offset)); 1933} 1934 1935void X86Assembler::GetCurrentThread(ManagedRegister tr) { 1936 fs()->movl(tr.AsX86().AsCpuRegister(), 1937 Address::Absolute(Thread::SelfOffset<4>())); 1938} 1939 1940void X86Assembler::GetCurrentThread(FrameOffset offset, 1941 ManagedRegister mscratch) { 1942 X86ManagedRegister scratch = mscratch.AsX86(); 1943 fs()->movl(scratch.AsCpuRegister(), Address::Absolute(Thread::SelfOffset<4>())); 1944 movl(Address(ESP, offset), scratch.AsCpuRegister()); 1945} 1946 1947void X86Assembler::ExceptionPoll(ManagedRegister /*scratch*/, size_t stack_adjust) { 1948 X86ExceptionSlowPath* slow = new X86ExceptionSlowPath(stack_adjust); 1949 buffer_.EnqueueSlowPath(slow); 1950 fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<4>()), Immediate(0)); 1951 j(kNotEqual, slow->Entry()); 1952} 1953 1954void X86ExceptionSlowPath::Emit(Assembler *sasm) { 1955 X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm); 1956#define __ sp_asm-> 1957 __ Bind(&entry_); 1958 // Note: the return value is dead 1959 if (stack_adjust_ != 0) { // Fix up the frame. 1960 __ DecreaseFrameSize(stack_adjust_); 1961 } 1962 // Pass exception as argument in EAX 1963 __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>())); 1964 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException))); 1965 // this call should never return 1966 __ int3(); 1967#undef __ 1968} 1969 1970} // namespace x86 1971} // namespace art 1972