// full-codegen-x64.cc revision bb769b257e753aafcbd96767abb2abc645eaa20c
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// Generate code for a JS function.
// On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (ie, ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  // In PRIMARY mode the full frame prologue is emitted; otherwise the
  // caller has already set up the frame.
  if (mode == PRIMARY) {
    __ push(rbp);  // Caller's frame pointer.
    __ movq(rbp, rsp);
    __ push(rsi);  // Callee's context.
    __ push(rdi);  // Callee's JS Function.

    { Comment cmnt(masm_, "[ Allocate locals");
      int locals_count = scope()->num_stack_slots();
      if (locals_count == 1) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      } else if (locals_count > 1) {
        // Load undefined once and push it for each local slot.
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        for (int i = 0; i < locals_count; i++) {
          __ push(rdx);
        }
      }
    }

    bool function_in_register = true;

    // Possibly allocate a local context.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ Allocate local context");
      // Argument to NewContext is the function, which is still in rdi.
      __ push(rdi);
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        __ CallStub(&stub);
      } else {
        __ CallRuntime(Runtime::kNewContext, 1);
      }
      // The call above clobbered rdi.
      function_in_register = false;
      // Context is returned in both rax and rsi.  It replaces the context
      // passed to us.  It's saved in the stack and kept live in rsi.
      __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

      // Copy any necessary parameters into the context.
      int num_parameters = scope()->num_parameters();
      for (int i = 0; i < num_parameters; i++) {
        Slot* slot = scope()->parameter(i)->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
              (num_parameters - 1 - i) * kPointerSize;
          // Load parameter from stack.
          __ movq(rax, Operand(rbp, parameter_offset));
          // Store it in the context.
          int context_offset = Context::SlotOffset(slot->index());
          __ movq(Operand(rsi, context_offset), rax);
          // Update the write barrier.  This clobbers all involved
          // registers, so we have to use a third register to avoid
          // clobbering rsi.
          __ movq(rcx, rsi);
          __ RecordWrite(rcx, context_offset, rax, rbx);
        }
      }
    }

    // Possibly allocate an arguments object.
    Variable* arguments = scope()->arguments()->AsVariable();
    if (arguments != NULL) {
      // Arguments object must be allocated after the context object, in
      // case the "arguments" or ".arguments" variables are in the context.
      Comment cmnt(masm_, "[ Allocate arguments object");
      if (function_in_register) {
        __ push(rdi);
      } else {
        __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      }
      // The receiver is just before the parameters on the caller's stack.
      int offset = scope()->num_parameters() * kPointerSize;
      __ lea(rdx,
             Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
      __ push(rdx);
      __ Push(Smi::FromInt(scope()->num_parameters()));
      // Arguments to ArgumentsAccessStub:
      //   function, receiver address, parameter count.
      // The stub will rewrite receiver and parameter count if the previous
      // stack frame was an arguments adapter frame.
      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
      __ CallStub(&stub);
      // Store new arguments object in both "arguments" and ".arguments" slots.
      __ movq(rcx, rax);
      Move(arguments->slot(), rax, rbx, rdx);
      Slot* dot_arguments_slot =
          scope()->arguments_shadow()->AsVariable()->slot();
      Move(dot_arguments_slot, rcx, rbx, rdx);
    }
  }

  { Comment cmnt(masm_, "[ Declarations");
    // For named function expressions, declare the function name as a
    // constant.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      EmitDeclaration(scope()->function(), Variable::CONST, NULL);
    }
    // Visit all the explicit declarations unless there is an illegal
    // redeclaration.
    if (scope()->HasIllegalRedeclaration()) {
      scope()->VisitIllegalRedeclaration(this);
    } else {
      VisitDeclarations(scope()->declarations());
    }
  }

  { Comment cmnt(masm_, "[ Stack check");
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok);
    StackCheckStub stub;
    __ CallStub(&stub);
    __ bind(&ok);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);
  }

  { Comment cmnt(masm_, "[ return <undefined>;");
    // Emit a 'return undefined' in case control fell off the end of the body.
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


// Emit the shared return sequence: optionally trace the exit, tear down
// the JS frame and return, popping the receiver and parameters.  The
// sequence is bound once at return_label_; subsequent returns jump to it.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // The return value (rax) is the argument to the runtime trace call.
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movq(rsp, rbp);
    __ pop(rbp);
    __ ret((scope()->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint.  We
    // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
    // (3 + 1 + 3).
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning matches what is
    // expected by the debugger.
    ASSERT_EQ(Assembler::kJSReturnSequenceLength,
              masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}


// Complete an expression whose value is in register reg, according to the
// given expression context (drop it, materialize it in the accumulator or
// on the stack, or branch on it).
void FullCodeGenerator::Apply(Expression::Context context, Register reg) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      // Nothing to do.
      break;

    case Expression::kValue:
      // Move value into place.
      switch (location_) {
        case kAccumulator:
          if (!reg.is(result_register())) __ movq(result_register(), reg);
          break;
        case kStack:
          __ push(reg);
          break;
      }
      break;

    case Expression::kTest:
      // For simplicity we always test the accumulator register.
      if (!reg.is(result_register())) __ movq(result_register(), reg);
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      if (!reg.is(result_register())) __ movq(result_register(), reg);
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          // Duplicate the value on the stack in case it is needed.
          __ push(result_register());
          break;
      }
      DoTest(context);
      break;
  }
}


// Complete an expression whose value lives in the given slot.
void FullCodeGenerator::Apply(Expression::Context context, Slot* slot) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
    case Expression::kEffect:
      // Nothing to do.
      break;
    case Expression::kValue: {
      MemOperand slot_operand = EmitSlotSearch(slot, result_register());
      switch (location_) {
        case kAccumulator:
          __ movq(result_register(), slot_operand);
          break;
        case kStack:
          // Memory operands can be pushed directly.
          __ push(slot_operand);
          break;
      }
      break;
    }

    case Expression::kTest:
      Move(result_register(), slot);
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      Move(result_register(), slot);
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          // Duplicate the value on the stack in case it is needed.
          __ push(result_register());
          break;
      }
      DoTest(context);
      break;
  }
}


// Complete an expression whose value is a compile-time literal.
void FullCodeGenerator::Apply(Expression::Context context, Literal* lit) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
    case Expression::kEffect:
      // Nothing to do.
      break;
    case Expression::kValue:
      switch (location_) {
        case kAccumulator:
          __ Move(result_register(), lit->handle());
          break;
        case kStack:
          __ Push(lit->handle());
          break;
      }
      break;

    case Expression::kTest:
      __ Move(result_register(), lit->handle());
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      __ Move(result_register(), lit->handle());
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          // Duplicate the value on the stack in case it is needed.
          __ push(result_register());
          break;
      }
      DoTest(context);
      break;
  }
}


// Complete an expression whose value is currently on top of the stack.
void FullCodeGenerator::ApplyTOS(Expression::Context context) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(1);
      break;

    case Expression::kValue:
      switch (location_) {
        case kAccumulator:
          __ pop(result_register());
          break;
        case kStack:
          // Already in place.
          break;
      }
      break;

    case Expression::kTest:
      __ pop(result_register());
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          __ pop(result_register());
          break;
        case kStack:
          // Keep the copy on the stack; load it into the accumulator for
          // the test.
          __ movq(result_register(), Operand(rsp, 0));
          break;
      }
      DoTest(context);
      break;
  }
}


// Drop count elements from the stack, then complete the expression whose
// value is in register reg.
void FullCodeGenerator::DropAndApply(int count,
                                     Expression::Context context,
                                     Register reg) {
  ASSERT(count > 0);
  ASSERT(!reg.is(rsp));
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(count);
      break;

    case Expression::kValue:
      switch (location_) {
        case kAccumulator:
          __ Drop(count);
          if (!reg.is(result_register())) __ movq(result_register(), reg);
          break;
        case kStack:
          // Overwrite the topmost remaining stack slot with the value.
          if (count > 1) __ Drop(count - 1);
          __ movq(Operand(rsp, 0), reg);
          break;
      }
      break;

    case Expression::kTest:
      __ Drop(count);
      if (!reg.is(result_register())) __ movq(result_register(), reg);
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          __ Drop(count);
          if (!reg.is(result_register())) __ movq(result_register(), reg);
          break;
        case kStack:
          if (count > 1) __ Drop(count - 1);
          __ movq(result_register(), reg);
          __ movq(Operand(rsp, 0), result_register());
          break;
      }
      DoTest(context);
      break;
  }
}


// Compute the true/false branch targets for a test in the current
// expression context.  In value-producing contexts the targets are the
// materialization labels supplied by the caller; in pure test contexts
// they are the enclosing true_label_/false_label_.
void FullCodeGenerator::PrepareTest(Label* materialize_true,
                                    Label* materialize_false,
                                    Label** if_true,
                                    Label** if_false) {
  switch (context_) {
    case Expression::kUninitialized:
      UNREACHABLE();
      break;
    case Expression::kEffect:
      // In an effect context, the true and the false case branch to the
      // same label.
      *if_true = *if_false = materialize_true;
      break;
    case Expression::kValue:
      *if_true = materialize_true;
      *if_false = materialize_false;
      break;
    case Expression::kTest:
      *if_true = true_label_;
      *if_false = false_label_;
      break;
    case Expression::kValueTest:
      *if_true = materialize_true;
      *if_false = false_label_;
      break;
    case Expression::kTestValue:
      *if_true = true_label_;
      *if_false = materialize_false;
      break;
  }
}


// Complete a test by binding the materialization labels computed by
// PrepareTest and materializing true/false values where the context
// requires them.
void FullCodeGenerator::Apply(Expression::Context context,
                              Label* materialize_true,
                              Label* materialize_false) {
  switch (context) {
    case Expression::kUninitialized:
      // NOTE(review): falls through to kEffect; the other Apply overloads
      // call UNREACHABLE() for this context — confirm this is intentional.

    case Expression::kEffect:
      ASSERT_EQ(materialize_true, materialize_false);
      __ bind(materialize_true);
      break;

    case Expression::kValue: {
      Label done;
      switch (location_) {
        case kAccumulator:
          __ bind(materialize_true);
          __ Move(result_register(), Factory::true_value());
          __ jmp(&done);
          __ bind(materialize_false);
          __ Move(result_register(), Factory::false_value());
          break;
        case kStack:
          __ bind(materialize_true);
          __ Push(Factory::true_value());
          __ jmp(&done);
          __ bind(materialize_false);
          __ Push(Factory::false_value());
          break;
      }
      __ bind(&done);
      break;
    }

    case Expression::kTest:
      // Control flow continues at the caller's true/false labels.
      break;

    case Expression::kValueTest:
      __ bind(materialize_true);
      switch (location_) {
        case kAccumulator:
          __ Move(result_register(), Factory::true_value());
          break;
        case kStack:
          __ Push(Factory::true_value());
          break;
      }
      __ jmp(true_label_);
      break;

    case Expression::kTestValue:
      __ bind(materialize_false);
      switch (location_) {
        case kAccumulator:
          __ Move(result_register(), Factory::false_value());
          break;
        case kStack:
          __ Push(Factory::false_value());
          break;
      }
      __ jmp(false_label_);
      break;
  }
}


// Convert constant control flow (true or false) to the result expected for
// a given expression context.
void FullCodeGenerator::Apply(Expression::Context context, bool flag) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
      break;
    case Expression::kEffect:
      break;
    case Expression::kValue: {
      Heap::RootListIndex value_root_index =
          flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
      switch (location_) {
        case kAccumulator:
          __ LoadRoot(result_register(), value_root_index);
          break;
        case kStack:
          __ PushRoot(value_root_index);
          break;
      }
      break;
    }
    case Expression::kTest:
      __ jmp(flag ? true_label_ : false_label_);
      break;
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          // If value is false it's needed.
          if (!flag) __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
          break;
        case kStack:
          // If value is false it's needed.
          if (!flag) __ PushRoot(Heap::kFalseValueRootIndex);
          break;
      }
      __ jmp(flag ? true_label_ : false_label_);
      break;
    case Expression::kValueTest:
      switch (location_) {
        case kAccumulator:
          // If value is true it's needed.
          if (flag) __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
          break;
        case kStack:
          // If value is true it's needed.
          if (flag) __ PushRoot(Heap::kTrueValueRootIndex);
          break;
      }
      __ jmp(flag ? true_label_ : false_label_);
      break;
  }
}


// Emit a ToBoolean test of the value in the accumulator and branch to the
// context's true/false labels.  Common true/false/undefined/smi-zero cases
// are tested inline; everything else goes through the ToBoolean stub.
void FullCodeGenerator::DoTest(Expression::Context context) {
  // The value to test is in the accumulator.  If the value might be needed
  // on the stack (value/test and test/value contexts with a stack location
  // desired), then the value is already duplicated on the stack.
  ASSERT_NE(NULL, true_label_);
  ASSERT_NE(NULL, false_label_);

  // In value/test and test/value expression contexts with stack as the
  // desired location, there is already an extra value on the stack.  Use a
  // label to discard it if unneeded.
  Label discard;
  Label* if_true = true_label_;
  Label* if_false = false_label_;
  switch (context) {
    case Expression::kUninitialized:
    case Expression::kEffect:
    case Expression::kValue:
      UNREACHABLE();
    case Expression::kTest:
      break;
    case Expression::kValueTest:
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          // The duplicate is only needed on the true path.
          if_false = &discard;
          break;
      }
      break;
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          // The duplicate is only needed on the false path.
          if_true = &discard;
          break;
      }
      break;
  }

  // Emit the inlined tests assumed by the stub.
  __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
  __ j(equal, if_false);
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  __ j(equal, if_true);
  __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
  __ j(equal, if_false);
  ASSERT_EQ(0, kSmiTag);
  __ SmiCompare(result_register(), Smi::FromInt(0));
  __ j(equal, if_false);
  // Any other smi is truthy.
  Condition is_smi = masm_->CheckSmi(result_register());
  __ j(is_smi, if_true);

  // Save a copy of the value if it may be needed and isn't already saved.
  switch (context) {
    case Expression::kUninitialized:
    case Expression::kEffect:
    case Expression::kValue:
      UNREACHABLE();
    case Expression::kTest:
      break;
    case Expression::kValueTest:
      switch (location_) {
        case kAccumulator:
          __ push(result_register());
          break;
        case kStack:
          break;
      }
      break;
    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          __ push(result_register());
          break;
        case kStack:
          break;
      }
      break;
  }

  // Call the ToBoolean stub for all other cases.
  ToBooleanStub stub;
  __ push(result_register());
  __ CallStub(&stub);
  __ testq(rax, rax);

  // The stub returns nonzero for true.  Complete based on the context.
  switch (context) {
    case Expression::kUninitialized:
    case Expression::kEffect:
    case Expression::kValue:
      UNREACHABLE();

    case Expression::kTest:
      __ j(not_zero, true_label_);
      __ jmp(false_label_);
      break;

    case Expression::kValueTest:
      switch (location_) {
        case kAccumulator:
          __ j(zero, &discard);
          __ pop(result_register());
          __ jmp(true_label_);
          break;
        case kStack:
          __ j(not_zero, true_label_);
          break;
      }
      // False path: drop the saved copy before branching.
      __ bind(&discard);
      __ Drop(1);
      __ jmp(false_label_);
      break;

    case Expression::kTestValue:
      switch (location_) {
        case kAccumulator:
          __ j(not_zero, &discard);
          __ pop(result_register());
          __ jmp(false_label_);
          break;
        case kStack:
          __ j(zero, false_label_);
          break;
      }
      // True path: drop the saved copy before branching.
      __ bind(&discard);
      __ Drop(1);
      __ jmp(true_label_);
      break;
  }
}


// Return a memory operand addressing the given slot, possibly using (and
// clobbering) scratch to walk the context chain for context slots.
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return Operand(rbp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return CodeGenerator::ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      // Lookup slots require a runtime call and have no memory operand.
      UNREACHABLE();
  }
  UNREACHABLE();
  return Operand(rax, 0);
}


// Load the value of a slot into a register.
void FullCodeGenerator::Move(Register destination, Slot* source) {
  MemOperand location = EmitSlotSearch(source, destination);
  __ movq(destination, location);
}


// Store src into the given slot, emitting a write barrier for context
// slots.  scratch1 and scratch2 are clobbered.
void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ movq(location, src);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
    __ RecordWrite(scratch1, offset, src, scratch2);
  }
}


// Emit code for a variable or function declaration.  For CONST
// declarations the slot is initialized with the hole; for function
// declarations it is initialized with the closure.
void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->slot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
        } else if (function != NULL) {
          VisitForValue(function, kAccumulator);
          __ movq(Operand(rbp, SlotOffset(slot)), result_register());
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check if we have the correct context pointer.
          __ movq(rbx,
                  CodeGenerator::ContextOperand(rsi, Context::FCONTEXT_INDEX));
          __ cmpq(rbx, rsi);
          __ Check(equal, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
                  kScratchRegister);
          // No write barrier since the hole value is in old space.
        } else if (function != NULL) {
          VisitForValue(function, kAccumulator);
          __ movq(CodeGenerator::ContextOperand(rsi, slot->index()),
                  result_register());
          int offset = Context::SlotOffset(slot->index());
          // Preserve rsi: RecordWrite clobbers its register arguments.
          __ movq(rbx, rsi);
          __ RecordWrite(rbx, offset, result_register(), rcx);
        }
        break;

      case Slot::LOOKUP: {
        __ push(rsi);
        __ Push(variable->name());
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR || mode == Variable::CONST);
        PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ Push(Smi::FromInt(attr));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ PushRoot(Heap::kTheHoleValueRootIndex);
        } else if (function != NULL) {
          VisitForValue(function, kStack);
        } else {
          __ Push(Smi::FromInt(0));  // no initial value!
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (function != NULL || mode == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property.  Use (keyed) IC to set the initial value.
      VisitForValue(prop->obj(), kStack);
      if (function != NULL) {
        VisitForValue(prop->key(), kStack);
        VisitForValue(function, kAccumulator);
        __ pop(rcx);
      } else {
        VisitForValue(prop->key(), kAccumulator);
        __ movq(rcx, result_register());
        __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
      }
      __ pop(rdx);

      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      __ call(ic, RelocInfo::CODE_TARGET);
      // Absence of a test rax instruction following the call
      // indicates that none of the load was inlined.
      __ nop();
    }
  }
}


// Dispatch a declaration node to EmitDeclaration.
void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}


// Emit code declaring a set of global variables/functions via the runtime.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(is_eval() ? 1 : 0));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);
  // Keep the switch value on the stack until a case matches.
  VisitForValue(stmt->tag(), kStack);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForValue(clause->label(), kAccumulator);

    // Perform the comparison as if via '==='.  The comparison stub expects
    // the smi vs. smi case to be handled before it is called.
    Label slow_case;
    __ movq(rdx, Operand(rsp, 0));  // Switch value.
    __ JumpIfNotBothSmi(rdx, rax, &slow_case);
    __ SmiCompare(rdx, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target()->entry_label());

    __ bind(&slow_case);
    CompareStub stub(equal, true);
    __ CallStub(&stub);
    __ testq(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target()->entry_label());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_target());
  } else {
    __ jmp(default_clause->body_target()->entry_label());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target()->entry_label());
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.  Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForValue(stmt->enumerable(), kAccumulator);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &exit);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(rax);

  // TODO(kasperl): Check cache validity in generated code.  This is a
  // fast case for the JSObject::IsSimpleEnum cache validity
  // checks.  If we cannot guarantee cache validity, call the runtime
  // system to check cache validity or get the property names in a
  // fixed array.

  // Get the set of properties to enumerate.
  __ push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check.  Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax.  Get the enumeration cache from it.
  __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
  __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Setup the four remaining stack slots.
  __ push(rax);  // Map.
  __ push(rdx);  // Enumeration cache.
  __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
  __ push(rax);  // Enumeration cache length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register rax.  Iterate through that.
  __ bind(&fixed_array);
  __ Push(Smi::FromInt(0));  // Map (0) - force slow check.
  __ push(rax);
  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_target());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = __ SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case into register rdx.
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  Label update_each;
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore.  If the property has been removed while iterating, we
  // just skip it.
  __ push(rcx);  // Enumerable.
  __ push(rbx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.continue_target());
  __ movq(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movq(result_register(), rbx);
  // Perform the assignment as if via '='.
  EmitAssignment(stmt->each());

  // Generate code for the body of the loop.
  Label stack_limit_hit, stack_check_done;
  Visit(stmt->body());

  __ StackLimitCheck(&stack_limit_hit);
  __ bind(&stack_check_done);

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_target());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
  __ jmp(&loop);

  // Slow case for the stack limit check.
  StackCheckStub stack_check_stub;
  __ bind(&stack_limit_hit);
  __ CallStub(&stack_check_stub);
  __ jmp(&stack_check_done);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_target());
  __ addq(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}


// Allocate a closure for the given shared function info, leaving the
// result in rax (then applied to the current context).
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() && info->num_literals() == 0) {
    FastNewClosureStub stub;
    __ Push(info);
    __ CallStub(&stub);
  } else {
    __ push(rsi);
    __ Push(info);
    __ CallRuntime(Runtime::kNewClosure, 2);
  }
  Apply(context_, rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var(), context_);
}


void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                         Expression::Context context) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->slot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in rcx and the global
    // object on the stack.
    __ Move(rcx, var->name());
    __ movq(rax, CodeGenerator::GlobalObject());
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
    // A test rax instruction following the call is used by the IC to
    // indicate that the inobject property case was inlined. Ensure there
    // is no test rax instruction here.
    __ nop();
    Apply(context, rax);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Dynamically introduced variable: the runtime resolves it by name in
    // the current context chain.
    Comment cmnt(masm_, "Lookup slot");
    __ push(rsi);  // Context.
    __ Push(var->name());
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    Apply(context, rax);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    if (var->mode() == Variable::CONST) {
      // Constants may be the hole value if they have not been initialized.
      // Unhole them.
      Label done;
      MemOperand slot_operand = EmitSlotSearch(slot, rax);
      __ movq(rax, slot_operand);
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, &done);
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      __ bind(&done);
      Apply(context, rax);
    } else {
      Apply(context, slot);
    }

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    // Rewritten parameter accesses are of the form "slot[literal]".

    // Assert that the object is in a slot.
    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
    ASSERT_NOT_NULL(object_var);
    Slot* object_slot = object_var->slot();
    ASSERT_NOT_NULL(object_slot);

    // Load the object.
    MemOperand object_loc = EmitSlotSearch(object_slot, rax);
    __ movq(rdx, object_loc);

    // Assert that the key is a smi.
    Literal* key_literal = property->key()->AsLiteral();
    ASSERT_NOT_NULL(key_literal);
    ASSERT(key_literal->handle()->IsSmi());

    // Load the key.
    __ Move(rax, key_literal->handle());

    // Do a keyed property load.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ call(ic, RelocInfo::CODE_TARGET);
    // Notice: We must not have a "test rax, ..." instruction after the
    // call. It is treated specially by the LoadIC code.
    __ nop();
    Apply(context, rax);
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // rdi = JS function.
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ movq(rbx, FieldOperand(rcx, literal_offset));
  // An undefined literal slot means the regexp has not been materialized yet.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized);

  // Create regexp literal using runtime function
  // Result will be in rax.
  __ push(rcx);  // Literals array.
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->pattern());
  __ Push(expr->flags());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ movq(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(rbx);
  __ Push(Smi::FromInt(size));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(rbx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ movq(rdx, FieldOperand(rbx, i));
    __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
    __ movq(FieldOperand(rax, i), rdx);
    __ movq(FieldOperand(rax, i + kPointerSize), rcx);
  }
  // Copy the trailing word when the object size is an odd number of words.
  if ((size % (2 * kPointerSize)) != 0) {
    __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
    __ movq(FieldOperand(rax, size - kPointerSize), rdx);
  }
  Apply(context_, rax);
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_properties());
  __ Push(Smi::FromInt(expr->fast_elements() ? 1 : 0));
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    // Compile-time values were already set by the create-literal runtime call.
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          // Symbol key: use a named store IC (receiver in rdx, name in rcx,
          // value in rax).
          VisitForValue(value, kAccumulator);
          __ Move(rcx, key->handle());
          __ movq(rdx, Operand(rsp, 0));
          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
          __ call(ic, RelocInfo::CODE_TARGET);
          __ nop();  // Signal no inlined code.
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForValue(key, kStack);
        VisitForValue(value, kStack);
        __ CallRuntime(Runtime::kSetProperty, 3);
        break;
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        __ push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForValue(key, kStack);
        // Third argument: 1 selects the setter, 0 the getter.
        __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
                    Smi::FromInt(1) :
                    Smi::FromInt(0));
        VisitForValue(value, kStack);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
        break;
    }
  }

  if (result_saved) {
    ApplyTOS(context_);
  } else {
    Apply(context_, rax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(Smi::FromInt(expr->literal_index()));
  __ Push(expr->constant_elements());
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(rax);
      result_saved = true;
    }
    VisitForValue(subexpr, kAccumulator);

    // Store the subexpression value in the array's elements.
    __ movq(rbx, Operand(rsp, 0));  // Copy of array literal.
    __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
    int offset = FixedArray::kHeaderSize + (i * kPointerSize);
    __ movq(FieldOperand(rbx, offset), result_register());

    // Update the write barrier for the array store.
    __ RecordWrite(rbx, offset, result_register(), rcx);
  }

  if (result_saved) {
    ApplyTOS(context_);
  } else {
    Apply(context_, rax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForValue(prop->obj(), kAccumulator);
        __ push(result_register());
      } else {
        VisitForValue(prop->obj(), kStack);
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        // Receiver and key are needed both on the stack (for the store
        // later) and in rdx/rax (for the keyed load now).
        VisitForValue(prop->obj(), kStack);
        VisitForValue(prop->key(), kAccumulator);
        __ movq(rdx, Operand(rsp, 0));
        __ push(rax);
      } else {
        VisitForValue(prop->obj(), kStack);
        VisitForValue(prop->key(), kStack);
      }
      break;
  }

  // If we have a compound assignment: Get value of LHS expression and
  // store in on top of the stack.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kStack;
    switch (assign_type) {
      case VARIABLE:
        EmitVariableLoad(expr->target()->AsVariableProxy()->var(),
                         Expression::kValue);
        break;
      case NAMED_PROPERTY:
        EmitNamedPropertyLoad(prop);
        __ push(result_register());
        break;
      case KEYED_PROPERTY:
        EmitKeyedPropertyLoad(prop);
        __ push(result_register());
        break;
    }
    location_ = saved_location;
  }

  // Evaluate RHS expression.
  Expression* rhs = expr->value();
  VisitForValue(rhs, kAccumulator);

  // If we have a compound assignment: Apply operator.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kAccumulator;
    EmitBinaryOp(expr->binary_op(), Expression::kValue);
    location_ = saved_location;
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(),
                             context_);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


// Loads a named property; the literal property name is passed in rcx.
// The load IC leaves the result in rax.
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ Move(rcx, key->handle());
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The nop tells the IC that no inlined code follows the call (see the
  // "test rax" note in EmitVariableLoad).
  __ nop();
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // No "test rax" may follow the call; the nop keeps the site unpatched.
  __ nop();
}


// Applies |op| to the left operand on top of the stack and the right
// operand in the result register, via the generic binary op stub.
void FullCodeGenerator::EmitBinaryOp(Token::Value op,
                                     Expression::Context context) {
  __ push(result_register());
  GenericBinaryOpStub stub(op,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS);
  __ CallStub(&stub);
  Apply(context, rax);
}


// Performs a plain '=' assignment of the value in rax to |expr|; used by
// for-in to assign the current key to the 'each' target.
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ?
        NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EmitVariableAssignment(var, Token::ASSIGN, Expression::kEffect);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForValue(prop->obj(), kAccumulator);
      __ movq(rdx, rax);  // Receiver.
      __ pop(rax);  // Restore value.
      __ Move(rcx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
      __ call(ic, RelocInfo::CODE_TARGET);
      __ nop();  // Signal no inlined code.
      break;
    }
    case KEYED_PROPERTY: {
      __ push(rax);  // Preserve value.
      VisitForValue(prop->obj(), kStack);
      VisitForValue(prop->key(), kAccumulator);
      __ movq(rcx, rax);  // Key.
      __ pop(rdx);  // Receiver.
      __ pop(rax);  // Restore value.
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      __ call(ic, RelocInfo::CODE_TARGET);
      __ nop();  // Signal no inlined code.
      break;
    }
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op,
                                               Expression::Context context) {
  // Left-hand sides that rewrite to explicit property accesses do not reach
  // here.
  ASSERT(var != NULL);
  ASSERT(var->is_global() || var->slot() != NULL);

  if (var->is_global()) {
    ASSERT(!var->is_this());
    // Assignment to a global variable. Use inline caching for the
    // assignment. Right-hand-side value is passed in rax, variable name in
    // rcx, and the global object on the stack.
    __ Move(rcx, var->name());
    __ movq(rdx, CodeGenerator::GlobalObject());
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    __ nop();  // Signal no inlined code.

  } else if (var->mode() != Variable::CONST || op == Token::INIT_CONST) {
    // Perform the assignment for non-const variables and for initialization
    // of const variables. Const assignments are simply skipped.
    Label done;
    Slot* slot = var->slot();
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (op == Token::INIT_CONST) {
          // Detect const reinitialization by checking for the hole value.
          __ movq(rdx, Operand(rbp, SlotOffset(slot)));
          __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done);
        }
        // Perform the assignment.
        __ movq(Operand(rbp, SlotOffset(slot)), rax);
        break;

      case Slot::CONTEXT: {
        MemOperand target = EmitSlotSearch(slot, rcx);
        if (op == Token::INIT_CONST) {
          // Detect const reinitialization by checking for the hole value.
          __ movq(rdx, target);
          __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
          __ j(not_equal, &done);
        }
        // Perform the assignment and issue the write barrier.
        __ movq(target, rax);
        // The value of the assignment is in rax. RecordWrite clobbers its
        // register arguments.
        __ movq(rdx, rax);
        int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
        __ RecordWrite(rcx, offset, rdx, rbx);
        break;
      }

      case Slot::LOOKUP:
        // Call the runtime for the assignment. The runtime will ignore
        // const reinitialization.
        __ push(rax);  // Value.
        __ push(rsi);  // Context.
        __ Push(var->name());
        if (op == Token::INIT_CONST) {
          // The runtime will ignore const redeclaration.
          __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
        } else {
          __ CallRuntime(Runtime::kStoreContextSlot, 3);
        }
        break;
    }
    __ bind(&done);
  }

  Apply(context, rax);
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ push(Operand(rsp, kPointerSize));  // Receiver is now under value.
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ Move(rcx, prop->key()->AsLiteral()->handle());
  if (expr->ends_initialization_block()) {
    // Leave receiver on the stack; it is needed again to revert to fast case.
    __ movq(rdx, Operand(rsp, 0));
  } else {
    __ pop(rdx);
  }
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  __ nop();  // Signal no inlined code.

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(rax);  // Result of assignment, saved even if not needed.
    __ push(Operand(rsp, kPointerSize));  // Receiver is under value.
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(rax);
    DropAndApply(1, context_, rax);
  } else {
    Apply(context_, rax);
  }
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    // Receiver is now under the key and value.
    __ push(Operand(rsp, 2 * kPointerSize));
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  __ pop(rcx);  // Key.
  if (expr->ends_initialization_block()) {
    __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack for later.
  } else {
    __ pop(rdx);
  }
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // This nop signals to the IC that there is no inlined code at the call
  // site for it to patch.
  __ nop();

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ pop(rdx);  // Receiver.
    __ push(rax);  // Result of assignment, saved even if not needed.
    __ push(rdx);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(rax);
  }

  Apply(context_, rax);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForValue(expr->obj(), kAccumulator);
    EmitNamedPropertyLoad(expr);
    Apply(context_, rax);
  } else {
    VisitForValue(expr->obj(), kStack);
    VisitForValue(expr->key(), kAccumulator);
    __ pop(rdx);  // Receiver for the keyed load.
    EmitKeyedPropertyLoad(expr);
    Apply(context_, rax);
  }
}


// Common code for calls using a call IC: the callee name is passed in rcx
// and the arguments on the stack.
void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }
  __ Move(rcx, name);
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
                                                         in_loop);
  __ Call(ic, mode);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  Apply(context_, rax);
}


// Common code for keyed calls using a call IC: the key is evaluated into
// the accumulator and passed in rcx.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key,
                                            RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }
  VisitForValue(key, kAccumulator);
  __ movq(rcx, rax);  // Key is passed in rcx.
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = CodeGenerator::ComputeKeyedCallInitialize(arg_count,
                                                              in_loop);
  __ Call(ic, mode);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  Apply(context_, rax);
}


// Common code for calls through CallFunctionStub; expects the function
// (and receiver) already pushed below the arguments.
void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
  __ CallStub(&stub);
  // Restore context register.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  DropAndApply(1, context_, rax);
}


void FullCodeGenerator::VisitCall(Call* expr) {
  Comment cmnt(masm_, "[ Call");
  Expression* fun = expr->expression();
  Variable* var = fun->AsVariableProxy()->AsVariable();

  if (var != NULL && var->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
    VisitForValue(fun, kStack);
    __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.

    // Push the arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForValue(args->at(i), kStack);
    }

    // Push copy of the function - found below the arguments.
    __ push(Operand(rsp, (arg_count + 1) * kPointerSize));

    // Push copy of the first argument or undefined if it doesn't exist.
    if (arg_count > 0) {
      __ push(Operand(rsp, arg_count * kPointerSize));
    } else {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    }

    // Push the receiver of the enclosing function and do runtime call.
    __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
    __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // The runtime call returns a pair of values in rax (function) and
    // rdx (receiver). Touch up the stack with the right values.
    __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
    __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);

    // Record source position for debugger.
    SetSourcePosition(expr->position());
    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    __ CallStub(&stub);
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    DropAndApply(1, context_, rax);
  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // Call to a global variable.
    // Push global object as receiver for the call IC lookup.
    __ push(CodeGenerator::GlobalObject());
    EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // Call to a lookup slot (dynamically introduced variable). Call
    // the runtime to find the function to call (returned in rax) and
    // the object holding it (returned in rdx).
    __ push(context_register());
    __ Push(var->name());
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ push(rax);  // Function.
    __ push(rdx);  // Receiver.
    EmitCallWithStub(expr);
  } else if (fun->AsProperty() != NULL) {
    // Call to an object property.
    Property* prop = fun->AsProperty();
    Literal* key = prop->key()->AsLiteral();
    if (key != NULL && key->handle()->IsSymbol()) {
      // Call to a named property, use call IC.
      VisitForValue(prop->obj(), kStack);
      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
    } else {
      // Call to a keyed property.
      // For a synthetic property use keyed load IC followed by function call,
      // for a regular property use KeyedCallIC.
      VisitForValue(prop->obj(), kStack);
      if (prop->is_synthetic()) {
        VisitForValue(prop->key(), kAccumulator);
        __ movq(rdx, Operand(rsp, 0));
        // Record source code position for IC call.
        SetSourcePosition(prop->position());
        Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
        __ call(ic, RelocInfo::CODE_TARGET);
        // By emitting a nop we make sure that we do not have a "test rax,..."
        // instruction after the call as it is treated specially
        // by the LoadIC code.
        __ nop();
        // Pop receiver.
        __ pop(rbx);
        // Push result (function).
        __ push(rax);
        // Push receiver object on stack.
        __ movq(rcx, CodeGenerator::GlobalObject());
        __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
        EmitCallWithStub(expr);
      } else {
        EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
      }
    }
  } else {
    // Call to some other expression. If the expression is an anonymous
    // function literal not called in a loop, mark it as one that should
    // also use the fast code generator.
    FunctionLiteral* lit = fun->AsFunctionLiteral();
    if (lit != NULL &&
        lit->name()->Equals(Heap::empty_string()) &&
        loop_depth() == 0) {
      lit->set_try_full_codegen(true);
    }
    VisitForValue(fun, kStack);
    // Load global receiver object.
    __ movq(rbx, CodeGenerator::GlobalObject());
    __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
    // Emit function call.
    EmitCallWithStub(expr);
  }
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
  // Push function on the stack.
  VisitForValue(expr->expression(), kStack);

  // Push global object (receiver).
  __ push(CodeGenerator::GlobalObject());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function, arg_count into rdi and rax.
  __ Set(rax, arg_count);
  // Function is in rsp[arg_count + 1].
  __ movq(rdi, Operand(rsp, rax, times_pointer_size, kPointerSize));

  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);

  // Replace function on TOS with result in rax, or pop it.
  DropAndApply(1, context_, rax);
}


// Inline runtime call: tests whether the single argument is a smi and
// branches to the if_true/if_false labels prepared by PrepareTest.
void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForValue(args->at(0), kAccumulator);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);

  __ JumpIfSmi(rax, if_true);
  __ jmp(if_false);

  Apply(context_, if_true, if_false);
}


// Inline runtime call: tests whether the single argument is a
// non-negative smi.
void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForValue(args->at(0), kAccumulator);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false);

  Condition positive_smi = __ CheckPositiveSmi(rax);
  __ j(positive_smi, if_true);
  __ jmp(if_false);

  Apply(context_, if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1); 1995 1996 VisitForValue(args->at(0), kAccumulator); 1997 1998 Label materialize_true, materialize_false; 1999 Label* if_true = NULL; 2000 Label* if_false = NULL; 2001 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2002 2003 __ JumpIfSmi(rax, if_false); 2004 __ CompareRoot(rax, Heap::kNullValueRootIndex); 2005 __ j(equal, if_true); 2006 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 2007 // Undetectable objects behave like undefined when tested with typeof. 2008 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), 2009 Immediate(1 << Map::kIsUndetectable)); 2010 __ j(not_zero, if_false); 2011 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 2012 __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE)); 2013 __ j(below, if_false); 2014 __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE)); 2015 __ j(below_equal, if_true); 2016 __ jmp(if_false); 2017 2018 Apply(context_, if_true, if_false); 2019} 2020 2021 2022void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) { 2023 ASSERT(args->length() == 1); 2024 2025 VisitForValue(args->at(0), kAccumulator); 2026 2027 Label materialize_true, materialize_false; 2028 Label* if_true = NULL; 2029 Label* if_false = NULL; 2030 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2031 2032 __ JumpIfSmi(rax, if_false); 2033 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx); 2034 __ j(above_equal, if_true); 2035 __ jmp(if_false); 2036 2037 Apply(context_, if_true, if_false); 2038} 2039 2040 2041void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) { 2042 ASSERT(args->length() == 1); 2043 2044 VisitForValue(args->at(0), kAccumulator); 2045 2046 Label materialize_true, materialize_false; 2047 Label* if_true = NULL; 2048 Label* if_false = NULL; 2049 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2050 2051 __ JumpIfSmi(rax, if_false); 2052 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 2053 
__ testb(FieldOperand(rbx, Map::kBitFieldOffset), 2054 Immediate(1 << Map::kIsUndetectable)); 2055 __ j(not_zero, if_true); 2056 __ jmp(if_false); 2057 2058 Apply(context_, if_true, if_false); 2059} 2060 2061 2062void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) { 2063 ASSERT(args->length() == 1); 2064 2065 VisitForValue(args->at(0), kAccumulator); 2066 2067 Label materialize_true, materialize_false; 2068 Label* if_true = NULL; 2069 Label* if_false = NULL; 2070 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2071 2072 __ JumpIfSmi(rax, if_false); 2073 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); 2074 __ j(equal, if_true); 2075 __ jmp(if_false); 2076 2077 Apply(context_, if_true, if_false); 2078} 2079 2080 2081void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) { 2082 ASSERT(args->length() == 1); 2083 2084 VisitForValue(args->at(0), kAccumulator); 2085 2086 Label materialize_true, materialize_false; 2087 Label* if_true = NULL; 2088 Label* if_false = NULL; 2089 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2090 2091 __ JumpIfSmi(rax, if_false); 2092 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); 2093 __ j(equal, if_true); 2094 __ jmp(if_false); 2095 2096 Apply(context_, if_true, if_false); 2097} 2098 2099 2100void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) { 2101 ASSERT(args->length() == 1); 2102 2103 VisitForValue(args->at(0), kAccumulator); 2104 2105 Label materialize_true, materialize_false; 2106 Label* if_true = NULL; 2107 Label* if_false = NULL; 2108 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2109 2110 __ JumpIfSmi(rax, if_false); 2111 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx); 2112 __ j(equal, if_true); 2113 __ jmp(if_false); 2114 2115 Apply(context_, if_true, if_false); 2116} 2117 2118 2119 2120void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) { 2121 ASSERT(args->length() == 0); 2122 2123 Label 
materialize_true, materialize_false; 2124 Label* if_true = NULL; 2125 Label* if_false = NULL; 2126 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2127 2128 // Get the frame pointer for the calling frame. 2129 __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2130 2131 // Skip the arguments adaptor frame if it exists. 2132 Label check_frame_marker; 2133 __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset), 2134 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2135 __ j(not_equal, &check_frame_marker); 2136 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset)); 2137 2138 // Check the marker in the calling frame. 2139 __ bind(&check_frame_marker); 2140 __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset), 2141 Smi::FromInt(StackFrame::CONSTRUCT)); 2142 __ j(equal, if_true); 2143 __ jmp(if_false); 2144 2145 Apply(context_, if_true, if_false); 2146} 2147 2148 2149void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) { 2150 ASSERT(args->length() == 2); 2151 2152 // Load the two objects into registers and perform the comparison. 2153 VisitForValue(args->at(0), kStack); 2154 VisitForValue(args->at(1), kAccumulator); 2155 2156 Label materialize_true, materialize_false; 2157 Label* if_true = NULL; 2158 Label* if_false = NULL; 2159 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 2160 2161 __ pop(rbx); 2162 __ cmpq(rax, rbx); 2163 __ j(equal, if_true); 2164 __ jmp(if_false); 2165 2166 Apply(context_, if_true, if_false); 2167} 2168 2169 2170void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) { 2171 ASSERT(args->length() == 1); 2172 2173 // ArgumentsAccessStub expects the key in edx and the formal 2174 // parameter count in eax. 
  VisitForValue(args->at(0), kAccumulator);
  __ movq(rdx, rax);
  __ Move(rax, Smi::FromInt(scope()->num_parameters()));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: number of actual arguments as a smi.
// Reads the length from the arguments adaptor frame when one exists,
// otherwise falls back to the formal parameter count.
void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(rax, Smi::FromInt(scope()->num_parameters()));

  // Check if the calling frame is an arguments adaptor frame.
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  if (FLAG_debug_code) __ AbortIfNotSmi(rax);
  Apply(context_, rax);
}


// Inline runtime-call emitter: compute the class name of an object.
// Smis and non-JS objects yield null; functions yield 'Function';
// objects with a non-function constructor yield 'Object'; otherwise the
// constructor's instance class name is returned.
void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForValue(args->at(0), kAccumulator);

  // If the object is a smi, we return null.
  __ JumpIfSmi(rax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);  // Map is now in rax.
  __ j(below, &null);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
  __ j(equal, &function);

  // Check if the constructor in the map is a function.
  __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &non_function_constructor);

  // rax now contains the constructor function. Grab the
  // instance class name from there.
  __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ Move(rax, Factory::function_class_symbol());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ Move(rax, Factory::Object_symbol());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(rax, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  Apply(context_, rax);
}


void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForValue(args->at(1), kStack);
    VisitForValue(args->at(2), kStack);
    __ CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  Apply(context_, rax);
}


// Inline runtime-call emitter: allocate a HeapNumber and fill it with a
// fresh random value in [0, 1).
void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ movq(rbx, rax);

  __ bind(&heapnumber_allocated);

  // Return a random uint32 number in rax.
  // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
  __ PrepareCallCFunction(0);
  __ CallCFunction(ExternalReference::random_uint32_function(), 0);

  // Convert 32 random bits in rax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  __ movd(xmm1, rcx);
  __ movd(xmm0, rax);
  __ cvtss2sd(xmm1, xmm1);
  // OR in the random mantissa bits, then subtract the implicit 1.0*2^20.
  __ xorpd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);
  __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);

  __ movq(rax, rbx);
  Apply(context_, rax);
}


// Inline runtime-call emitter: SubString(string, start, end) via stub.
void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ASSERT(args->length() == 3);
  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);
  VisitForValue(args->at(2), kStack);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: RegExpExec via stub.
void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ASSERT(args->length() == 4);
  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);
  VisitForValue(args->at(2), kStack);
  VisitForValue(args->at(3), kStack);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: unwrap a JSValue; non-JSValue inputs are
// returned unchanged.
void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForValue(args->at(0), kAccumulator);  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(rax, &done);
  // If the object is not a value type, return the object.
  __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
  __ j(not_equal, &done);
  __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));

  __ bind(&done);
  Apply(context_, rax);
}


// Inline runtime-call emitter: Math.pow via the runtime.
void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
  // Load the arguments on the stack and call the runtime function.
  ASSERT(args->length() == 2);
  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);
  __ CallRuntime(Runtime::kMath_pow, 2);
  Apply(context_, rax);
}


// Inline runtime-call emitter: store a value into a JSValue wrapper,
// with a write barrier; non-JSValue objects are left untouched.
void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForValue(args->at(0), kStack);  // Load the object.
  VisitForValue(args->at(1), kAccumulator);  // Load the value.
  __ pop(rbx);  // rax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(rbx, &done);

  // If the object is not a value type, return the value.
  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
  __ j(not_equal, &done);

  // Store the value.
  __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ movq(rdx, rax);
  __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);

  __ bind(&done);
  Apply(context_, rax);
}


// Inline runtime-call emitter: number-to-string conversion via stub.
void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForValue(args->at(0), kStack);

  NumberToStringStub stub;
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: String.fromCharCode for one char code,
// fast path with generated slow case.  Result ends up in rbx.
void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForValue(args->at(0), kAccumulator);

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  Apply(context_, rbx);
}


// Inline runtime-call emitter: charCodeAt(index).  Out-of-range indices
// produce NaN; non-smi indices are routed through conversion.
void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kAccumulator);

  Register object = rbx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rdx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      scratch,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  Apply(context_, result);
}


// Inline runtime-call emitter: charAt(index).  Out-of-range indices
// produce the empty string; non-smi indices go through conversion.
void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kAccumulator);

  Register object = rbx;
  Register index = rax;
  Register scratch1 = rcx;
  Register scratch2 = rdx;
  Register result = rax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch1,
                                  scratch2,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kEmptyStringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  Apply(context_, result);
}


// Inline runtime-call emitter: string concatenation via stub.
void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: string comparison via stub.
void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);

  StringCompareStub stub;
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: Math.sin via the transcendental cache.
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN);
  ASSERT(args->length() == 1);
  VisitForValue(args->at(0), kStack);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: Math.cos via the transcendental cache.
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS);
  ASSERT(args->length() == 1);
  VisitForValue(args->at(0), kStack);
  __ CallStub(&stub);
  Apply(context_, rax);
}


// Inline runtime-call emitter: Math.sqrt via the runtime.
void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
  // Load the argument on the stack and call the runtime function.
  ASSERT(args->length() == 1);
  VisitForValue(args->at(0), kStack);
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  Apply(context_, rax);
}


// Inline runtime-call emitter: invoke a function with an explicit
// receiver.  Args: receiver, arg0..argN-1, function.
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // For receiver and function.
  VisitForValue(args->at(0), kStack);  // Receiver.
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i + 1), kStack);
  }
  VisitForValue(args->at(arg_count + 1), kAccumulator);  // Function.

  // InvokeFunction requires function in rdi.  Move it in there.
  if (!result_register().is(rdi)) __ movq(rdi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(rdi, count, CALL_FUNCTION);
  // Restore context register after the call.
  __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  Apply(context_, rax);
}


// Inline runtime-call emitter: build a RegExp result object via runtime.
void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 3);
  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);
  VisitForValue(args->at(2), kStack);
  __ CallRuntime(Runtime::kRegExpConstructResult, 3);
  Apply(context_, rax);
}


// Inline runtime-call emitter: swap two array elements via runtime.
void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 3);
  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kStack);
  VisitForValue(args->at(2), kStack);
  __ CallRuntime(Runtime::kSwapElements, 3);
  Apply(context_, rax);
}


// Inline runtime-call emitter: look up a key in one of the JS-function
// result caches (cache id is a compile-time literal).  Fast path probes
// the cache finger; misses fall back to the runtime.
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    Apply(context_, rax);
    return;
  }

  VisitForValue(args->at(1), kAccumulator);

  Register key = rax;
  Register cache = rbx;
  Register tmp = rcx;
  // Load the cache from the global context.
  __ movq(cache, CodeGenerator::ContextOperand(rsi, Context::GLOBAL_INDEX));
  __ movq(cache,
          FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ movq(cache,
          CodeGenerator::ContextOperand(
              cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ movq(cache,
          FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp now holds finger offset as a smi.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  SmiIndex index =
      __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
  __ cmpq(key, FieldOperand(cache,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ j(not_equal, &not_found);
  // Hit: the cached value sits right after the key.
  __ movq(rax, FieldOperand(cache,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize + kPointerSize));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  Apply(context_, rax);
}


// Inline runtime-call emitter: true when both operands are JSRegExp
// objects with the same map and the same data array (or are identical).
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Register right = rax;
  Register left = rbx;
  Register tmp = rcx;

  VisitForValue(args->at(0), kStack);
  VisitForValue(args->at(1), kAccumulator);
  __ pop(left);

  Label done, fail, ok;
  __ cmpq(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
  __ j(either_smi, &fail);
  // NOTE(review): this second jump tests the same flags as the one
  // above and looks redundant — confirm against MacroAssembler::
  // CheckEitherSmi before removing.
  __ j(zero, &fail);
  __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
  // Left must be a JSRegExp.
  __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
          Immediate(JS_REGEXP_TYPE));
  __ j(not_equal, &fail);
  // Both must share the same map.
  __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  // Both must share the same regexp data.
  __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ Move(rax, Factory::false_value());
  __ jmp(&done);
  __ bind(&ok);
  __ Move(rax, Factory::true_value());
  __ bind(&done);

  Apply(context_, rax);
}


// Generate code for a runtime call.  Names starting with '_' are the
// inlined runtime calls handled by the Emit* functions above; JS
// runtime functions go through a call IC on the builtins object, and
// everything else calls the C++ runtime directly.
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ movq(rax, CodeGenerator::GlobalObject());
    __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function using a call IC.
    __ Move(rcx, expr->name());
    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
    __ call(ic, RelocInfo::CODE_TARGET);
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    __ CallRuntime(expr->function(), arg_count);
  }
  Apply(context_, rax);
}


// Generate code for the unary operators: delete, void, !, typeof,
// unary +, unary -, and ~.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* prop = expr->expression()->AsProperty();
      Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
      if (prop == NULL && var == NULL) {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        Apply(context_, true);
      } else if (var != NULL &&
                 !var->is_global() &&
                 var->slot() != NULL &&
                 var->slot()->type() != Slot::LOOKUP) {
        // Result of deleting non-global, non-dynamic variables is false.
        // The subexpression does not have side effects.
        Apply(context_, false);
      } else {
        // Property or variable reference.  Call the delete builtin with
        // object and property name as arguments.
        if (prop != NULL) {
          VisitForValue(prop->obj(), kStack);
          VisitForValue(prop->key(), kStack);
        } else if (var->is_global()) {
          __ push(CodeGenerator::GlobalObject());
          __ Push(var->name());
        } else {
          // Non-global variable.  Call the runtime to look up the context
          // where the variable was introduced.
          __ push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kLookupContext, 2);
          __ push(rax);
          __ Push(var->name());
        }
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        Apply(context_, rax);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // Evaluate the operand for effect; the result is always undefined.
      VisitForEffect(expr->expression());
      switch (context_) {
        case Expression::kUninitialized:
          UNREACHABLE();
          break;
        case Expression::kEffect:
          break;
        case Expression::kValue:
          switch (location_) {
            case kAccumulator:
              __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
              break;
            case kStack:
              __ PushRoot(Heap::kUndefinedValueRootIndex);
              break;
          }
          break;
        case Expression::kTestValue:
          // Value is false so it's needed.
          switch (location_) {
            case kAccumulator:
              __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
              break;
            case kStack:
              __ PushRoot(Heap::kUndefinedValueRootIndex);
              break;
          }
          // Fall through.
        case Expression::kTest:
        case Expression::kValueTest:
          // undefined is falsy: always take the false branch.
          __ jmp(false_label_);
          break;
      }
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      Label materialize_true, materialize_false;
      Label* if_true = NULL;
      Label* if_false = NULL;

      // Notice that the labels are swapped.
      PrepareTest(&materialize_true, &materialize_false, &if_false, &if_true);

      VisitForControl(expr->expression(), if_true, if_false);

      Apply(context_, if_false, if_true);  // Labels swapped.
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (proxy != NULL &&
          !proxy->var()->is_this() &&
          proxy->var()->is_global()) {
        Comment cmnt(masm_, "Global variable");
        __ Move(rcx, proxy->name());
        __ movq(rax, CodeGenerator::GlobalObject());
        Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
        // Use a regular load, not a contextual load, to avoid a reference
        // error.
        __ Call(ic, RelocInfo::CODE_TARGET);
        __ push(rax);
      } else if (proxy != NULL &&
                 proxy->var()->slot() != NULL &&
                 proxy->var()->slot()->type() == Slot::LOOKUP) {
        // Dynamically introduced variable: look it up without throwing
        // a reference error when it is absent.
        __ push(rsi);
        __ Push(proxy->name());
        __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
        __ push(rax);
      } else {
        // This expression cannot throw a reference error at the top level.
        VisitForValue(expr->expression(), kStack);
      }

      __ CallRuntime(Runtime::kTypeof, 1);
      Apply(context_, rax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      // Unary plus is ToNumber; smis are already numbers.
      VisitForValue(expr->expression(), kAccumulator);
      Label no_conversion;
      Condition is_smi = masm_->CheckSmi(result_register());
      __ j(is_smi, &no_conversion);
      __ push(result_register());
      __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
      __ bind(&no_conversion);
      Apply(context_, result_register());
      break;
    }

    case Token::SUB: {
      Comment cmt(masm_, "[ UnaryOperation (SUB)");
      bool can_overwrite =
          (expr->expression()->AsBinaryOperation() != NULL &&
           expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      GenericUnaryOpStub stub(Token::SUB, overwrite);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register rax.
      VisitForValue(expr->expression(), kAccumulator);
      __ CallStub(&stub);
      Apply(context_, rax);
      break;
    }

    case Token::BIT_NOT: {
      Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
      bool can_overwrite =
          (expr->expression()->AsBinaryOperation() != NULL &&
           expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register rax.
      VisitForValue(expr->expression(), kAccumulator);
      // Avoid calling the stub for Smis.
      Label smi, done;
      Condition is_smi = masm_->CheckSmi(result_register());
      __ j(is_smi, &smi);
      // Non-smi: call stub leaving result in accumulator register.
      __ CallStub(&stub);
      __ jmp(&done);
      // Perform operation directly on Smis.
      __ bind(&smi);
      __ SmiNot(result_register(), result_register());
      __ bind(&done);
      Apply(context_, result_register());
      break;
    }

    default:
      UNREACHABLE();
  }
}


// Generate code for pre/post increment and decrement.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");

  // Invalid left-hand-sides are rewritten to have a 'throw
  // ReferenceError' as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    Location saved_location = location_;
    location_ = kAccumulator;
    EmitVariableLoad(expr->expression()->AsVariableProxy()->var(),
                     Expression::kValue);
    location_ = saved_location;
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && context_ != Expression::kEffect) {
      __ Push(Smi::FromInt(0));
    }
    if (assign_type == NAMED_PROPERTY) {
      VisitForValue(prop->obj(), kAccumulator);
      __ push(rax);  // Copy of receiver, needed for later store.
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForValue(prop->obj(), kStack);
      VisitForValue(prop->key(), kAccumulator);
      __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack
      __ push(rax);  // Copy of key, needed for later store.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  Condition is_smi;
  is_smi = masm_->CheckSmi(rax);
  __ j(is_smi, &no_conversion);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    switch (context_) {
      case Expression::kUninitialized:
        UNREACHABLE();
      case Expression::kEffect:
        // Do not save result.
3002 break; 3003 case Expression::kValue: 3004 case Expression::kTest: 3005 case Expression::kValueTest: 3006 case Expression::kTestValue: 3007 // Save the result on the stack. If we have a named or keyed property 3008 // we store the result under the receiver that is currently on top 3009 // of the stack. 3010 switch (assign_type) { 3011 case VARIABLE: 3012 __ push(rax); 3013 break; 3014 case NAMED_PROPERTY: 3015 __ movq(Operand(rsp, kPointerSize), rax); 3016 break; 3017 case KEYED_PROPERTY: 3018 __ movq(Operand(rsp, 2 * kPointerSize), rax); 3019 break; 3020 } 3021 break; 3022 } 3023 } 3024 3025 // Inline smi case if we are in a loop. 3026 Label stub_call, done; 3027 if (loop_depth() > 0) { 3028 if (expr->op() == Token::INC) { 3029 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3030 } else { 3031 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3032 } 3033 __ j(overflow, &stub_call); 3034 // We could eliminate this smi check if we split the code at 3035 // the first smi check before calling ToNumber. 3036 is_smi = masm_->CheckSmi(rax); 3037 __ j(is_smi, &done); 3038 __ bind(&stub_call); 3039 // Call stub. Undo operation first. 3040 if (expr->op() == Token::INC) { 3041 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3042 } else { 3043 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3044 } 3045 } 3046 // Call stub for +1/-1. 3047 GenericBinaryOpStub stub(expr->binary_op(), 3048 NO_OVERWRITE, 3049 NO_GENERIC_BINARY_FLAGS); 3050 stub.GenerateCall(masm_, rax, Smi::FromInt(1)); 3051 __ bind(&done); 3052 3053 // Store the value returned in rax. 3054 switch (assign_type) { 3055 case VARIABLE: 3056 if (expr->is_postfix()) { 3057 // Perform the assignment as if via '='. 3058 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3059 Token::ASSIGN, 3060 Expression::kEffect); 3061 // For all contexts except kEffect: We have the result on 3062 // top of the stack. 
3063 if (context_ != Expression::kEffect) { 3064 ApplyTOS(context_); 3065 } 3066 } else { 3067 // Perform the assignment as if via '='. 3068 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3069 Token::ASSIGN, 3070 context_); 3071 } 3072 break; 3073 case NAMED_PROPERTY: { 3074 __ Move(rcx, prop->key()->AsLiteral()->handle()); 3075 __ pop(rdx); 3076 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); 3077 __ call(ic, RelocInfo::CODE_TARGET); 3078 // This nop signals to the IC that there is no inlined code at the call 3079 // site for it to patch. 3080 __ nop(); 3081 if (expr->is_postfix()) { 3082 if (context_ != Expression::kEffect) { 3083 ApplyTOS(context_); 3084 } 3085 } else { 3086 Apply(context_, rax); 3087 } 3088 break; 3089 } 3090 case KEYED_PROPERTY: { 3091 __ pop(rcx); 3092 __ pop(rdx); 3093 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); 3094 __ call(ic, RelocInfo::CODE_TARGET); 3095 // This nop signals to the IC that there is no inlined code at the call 3096 // site for it to patch. 
3097 __ nop(); 3098 if (expr->is_postfix()) { 3099 if (context_ != Expression::kEffect) { 3100 ApplyTOS(context_); 3101 } 3102 } else { 3103 Apply(context_, rax); 3104 } 3105 break; 3106 } 3107 } 3108} 3109 3110void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { 3111 Comment cmnt(masm_, "[ BinaryOperation"); 3112 switch (expr->op()) { 3113 case Token::COMMA: 3114 VisitForEffect(expr->left()); 3115 Visit(expr->right()); 3116 break; 3117 3118 case Token::OR: 3119 case Token::AND: 3120 EmitLogicalOperation(expr); 3121 break; 3122 3123 case Token::ADD: 3124 case Token::SUB: 3125 case Token::DIV: 3126 case Token::MOD: 3127 case Token::MUL: 3128 case Token::BIT_OR: 3129 case Token::BIT_AND: 3130 case Token::BIT_XOR: 3131 case Token::SHL: 3132 case Token::SHR: 3133 case Token::SAR: 3134 VisitForValue(expr->left(), kStack); 3135 VisitForValue(expr->right(), kAccumulator); 3136 EmitBinaryOp(expr->op(), context_); 3137 break; 3138 3139 default: 3140 UNREACHABLE(); 3141 } 3142} 3143 3144 3145void FullCodeGenerator::EmitNullCompare(bool strict, 3146 Register obj, 3147 Register null_const, 3148 Label* if_true, 3149 Label* if_false, 3150 Register scratch) { 3151 __ cmpq(obj, null_const); 3152 if (strict) { 3153 __ j(equal, if_true); 3154 } else { 3155 __ j(equal, if_true); 3156 __ CompareRoot(obj, Heap::kUndefinedValueRootIndex); 3157 __ j(equal, if_true); 3158 __ JumpIfSmi(obj, if_false); 3159 // It can be an undetectable object. 3160 __ movq(scratch, FieldOperand(obj, HeapObject::kMapOffset)); 3161 __ testb(FieldOperand(scratch, Map::kBitFieldOffset), 3162 Immediate(1 << Map::kIsUndetectable)); 3163 __ j(not_zero, if_true); 3164 } 3165 __ jmp(if_false); 3166} 3167 3168 3169void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 3170 Comment cmnt(masm_, "[ CompareOperation"); 3171 3172 // Always perform the comparison for its control flow. Pack the result 3173 // into the expression's context after the comparison is performed. 
3174 Label materialize_true, materialize_false; 3175 Label* if_true = NULL; 3176 Label* if_false = NULL; 3177 PrepareTest(&materialize_true, &materialize_false, &if_true, &if_false); 3178 3179 VisitForValue(expr->left(), kStack); 3180 switch (expr->op()) { 3181 case Token::IN: 3182 VisitForValue(expr->right(), kStack); 3183 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 3184 __ CompareRoot(rax, Heap::kTrueValueRootIndex); 3185 __ j(equal, if_true); 3186 __ jmp(if_false); 3187 break; 3188 3189 case Token::INSTANCEOF: { 3190 VisitForValue(expr->right(), kStack); 3191 InstanceofStub stub; 3192 __ CallStub(&stub); 3193 __ testq(rax, rax); 3194 __ j(zero, if_true); // The stub returns 0 for true. 3195 __ jmp(if_false); 3196 break; 3197 } 3198 3199 default: { 3200 VisitForValue(expr->right(), kAccumulator); 3201 Condition cc = no_condition; 3202 bool strict = false; 3203 switch (expr->op()) { 3204 case Token::EQ_STRICT: 3205 strict = true; 3206 // Fall through. 3207 case Token::EQ: { 3208 cc = equal; 3209 __ pop(rdx); 3210 // If either operand is constant null we do a fast compare 3211 // against null. 3212 Literal* right_literal = expr->right()->AsLiteral(); 3213 Literal* left_literal = expr->left()->AsLiteral(); 3214 if (right_literal != NULL && right_literal->handle()->IsNull()) { 3215 EmitNullCompare(strict, rdx, rax, if_true, if_false, rcx); 3216 Apply(context_, if_true, if_false); 3217 return; 3218 } else if (left_literal != NULL && left_literal->handle()->IsNull()) { 3219 EmitNullCompare(strict, rax, rdx, if_true, if_false, rcx); 3220 Apply(context_, if_true, if_false); 3221 return; 3222 } 3223 break; 3224 } 3225 case Token::LT: 3226 cc = less; 3227 __ pop(rdx); 3228 break; 3229 case Token::GT: 3230 // Reverse left and right sizes to obtain ECMA-262 conversion order. 3231 cc = less; 3232 __ movq(rdx, result_register()); 3233 __ pop(rax); 3234 break; 3235 case Token::LTE: 3236 // Reverse left and right sizes to obtain ECMA-262 conversion order. 
3237 cc = greater_equal; 3238 __ movq(rdx, result_register()); 3239 __ pop(rax); 3240 break; 3241 case Token::GTE: 3242 cc = greater_equal; 3243 __ pop(rdx); 3244 break; 3245 case Token::IN: 3246 case Token::INSTANCEOF: 3247 default: 3248 UNREACHABLE(); 3249 } 3250 3251 // The comparison stub expects the smi vs. smi case to be handled 3252 // before it is called. 3253 Label slow_case; 3254 __ JumpIfNotBothSmi(rax, rdx, &slow_case); 3255 __ SmiCompare(rdx, rax); 3256 __ j(cc, if_true); 3257 __ jmp(if_false); 3258 3259 __ bind(&slow_case); 3260 CompareStub stub(cc, strict); 3261 __ CallStub(&stub); 3262 __ testq(rax, rax); 3263 __ j(cc, if_true); 3264 __ jmp(if_false); 3265 } 3266 } 3267 3268 // Convert the result of the comparison into one expected for this 3269 // expression's context. 3270 Apply(context_, if_true, if_false); 3271} 3272 3273 3274void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { 3275 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 3276 Apply(context_, rax); 3277} 3278 3279 3280Register FullCodeGenerator::result_register() { return rax; } 3281 3282 3283Register FullCodeGenerator::context_register() { return rsi; } 3284 3285 3286void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 3287 ASSERT(IsAligned(frame_offset, kPointerSize)); 3288 __ movq(Operand(rbp, frame_offset), value); 3289} 3290 3291 3292void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 3293 __ movq(dst, CodeGenerator::ContextOperand(rsi, context_index)); 3294} 3295 3296 3297// ---------------------------------------------------------------------------- 3298// Non-local control flow support. 


// Prepares the stack for execution of a finally block: the return
// address on top of the stack is "cooked" into a smi-encoded offset
// from the start of the code object, and the result register is saved.
// Presumably the smi encoding keeps the slot GC-safe while the finally
// block runs — NOTE(review): confirm against the ia32/arm ports.
void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Cook return address on top of stack (smi encoded Code* delta)
  __ movq(rdx, Operand(rsp, 0));
  __ Move(rcx, masm_->CodeObject());
  __ subq(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ movq(Operand(rsp, 0), rdx);
  // Store result register while executing finally block.
  __ push(result_register());
}


// Reverses EnterFinallyBlock: restores the result register and turns
// the smi-encoded code offset on top of the stack back into an
// absolute return address before returning through it.
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Restore result register from stack.
  __ pop(result_register());
  // Uncook return address.
  __ movq(rdx, Operand(rsp, 0));
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addq(rdx, rcx);
  __ movq(Operand(rsp, 0), rdx);
  // And return.
  __ ret(0);
}


#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64