// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/regexp-macro-assembler.h"
#include "src/stub-cache.h"
#include "src/runtime.h"

namespace v8 {
namespace internal {


void FastNewClosureStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNewClosureFromStubFailure)->entry;
}


void FastNewContextStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdi };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  static Representation representations[] = {
      Representation::Tagged(),
      Representation::Smi(),
      Representation::Tagged() };
  descriptor->register_param_representations_ = representations;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx, rdx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx, rbx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void StringLengthStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rcx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStringLengthStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


static void InitializeArrayConstructorDescriptor(
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // rax -- number of arguments
  // rdi -- function
  // rbx -- allocation site with elements kind
  static Register registers_variable_args[] = { rdi, rbx, rax };
  static Register registers_no_args[] = { rdi, rbx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count covers the constructor pointer and a single
    // argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 3;
    static Representation representations[] = {
        Representation::Tagged(),
        Representation::Tagged(),
        Representation::Integer32() };
    descriptor->register_param_representations_ = representations;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenArrayConstructor)->entry;
}


static void InitializeInternalArrayConstructorDescriptor(
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // rax -- number of arguments
  // rdi -- constructor function
  static Register registers_variable_args[] = { rdi, rax };
  static Register registers_no_args[] = { rdi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count covers the constructor pointer and a single
    // argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = rax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
    static Representation representations[] = {
        Representation::Tagged(),
        Representation::Integer32() };
    descriptor->register_param_representations_ = representations;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, -1);
}
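
// A note on the constant_stack_parameter_count used above: 0 selects the
// register-only descriptor, while both 1 and -1 take the variable-argument
// path, passing the actual argument count in rax and the arguments themselves
// on the stack (PASS_ARGUMENTS). The constant is only recorded as
// hint_stack_parameter_count_; -1 means "unknown at compile time", as used by
// the N-arguments stubs above.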


void CompareNilICStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate()));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate()));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate()));
}


void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx, rdx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
}


void StringAddStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenStringAdd)->entry;
}


void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ArgumentAdaptorCall);
    static Register registers[] = { rdi,  // JSFunction
                                    rsi,  // context
                                    rax,  // actual number of arguments
                                    rbx,  // expected number of arguments
    };
    static Representation representations[] = {
        Representation::Tagged(),     // JSFunction
        Representation::Tagged(),     // context
        Representation::Integer32(),  // actual number of arguments
        Representation::Integer32(),  // expected number of arguments
    };
    descriptor->register_param_count_ = 4;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::KeyedCall);
    static Register registers[] = { rsi,  // context
                                    rcx,  // key
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // key
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::NamedCall);
    static Register registers[] = { rsi,  // context
                                    rcx,  // name
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // name
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::CallHandler);
    static Register registers[] = { rsi,  // context
                                    rdx,  // receiver
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // receiver
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ApiFunctionCall);
    static Register registers[] = { rax,  // callee
                                    rbx,  // call_data
                                    rcx,  // holder
                                    rdx,  // api_function_address
                                    rsi,  // context
    };
    static Representation representations[] = {
        Representation::Tagged(),    // callee
        Representation::Tagged(),    // call_data
        Representation::Tagged(),    // holder
        Representation::External(),  // api_function_address
        Representation::Tagged(),    // context
    };
    descriptor->register_param_count_ = 5;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor();
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           rax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ Ret();
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles_);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles_);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged.  SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if rcx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save rcx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in the lower 32 bits of the mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}
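
// For reference, the fast path above amounts to the following bit-level
// truncation (an illustrative C++ sketch of ours, not a V8 helper; it assumes
// the unbiased exponent is already known to be >= kMantissaBits, i.e. the
// non-cvttsd2siq case):
//
//   int32_t TruncateLargeDoubleToI(double input) {
//     uint64_t bits;
//     memcpy(&bits, &input, sizeof(bits));
//     // Shift distance of the mantissa: biased exponent minus (1023 + 52).
//     int shift = static_cast<int>((bits >> 52) & 0x7FF) - 1075;
//     uint32_t result = 0;
//     if (shift <= 31) {
//       // Only the low 32 mantissa bits can land in the low 32 result bits.
//       result = static_cast<uint32_t>(bits) << shift;
//     }
//     if (static_cast<int64_t>(bits) < 0) result = 0u - result;  // Sign.
//     return static_cast<int32_t>(result);
//   }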


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry
      // flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry
      // flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);                // Back up exponent.
  __ movsd(double_scratch, double_base);     // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register.  Exponent is already in
    // xmm1.
    __ movsd(xmm0, double_base);
    ASSERT(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
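
// For reference, the int_exponent path above is square-and-multiply; an
// equivalent C++ sketch (ours, ignoring the subnormal bailout at the end):
//
//   double PowIntExponent(double base, int exponent) {
//     double result = 1.0;
//     double scratch = base;
//     unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
//                               : static_cast<unsigned>(exponent);
//     while (e != 0) {
//       if (e & 1) result *= scratch;  // Fold in base^(2^k) for set bits.
//       scratch *= scratch;
//       e >>= 1;
//     }
//     return exponent < 0 ? 1.0 / result : result;
//   }
//
// The stub additionally bails out to the C runtime when 1/result computes to
// zero, since x^-y == (1/x)^y does not hold once subnormals are involved.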


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ----------- S t a t e -------------
    //  -- rax    : key
    //  -- rdx    : receiver
    //  -- rsp[0] : return address
    // -----------------------------------
    __ Cmp(rax, isolate()->factory()->prototype_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ----------- S t a t e -------------
    //  -- rax    : receiver
    //  -- rcx    : name
    //  -- rsp[0] : return address
    // -----------------------------------
    receiver = rax;
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame.  We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context.  We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax.  Use unsigned comparison to get negative
  // check for free.
  __ cmpp(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rax, rax, rdx);
  __ SmiToInteger32(rax, rax);
  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, args.GetArgumentOperand(0));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame.  Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpp(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rcx, rcx, rdx);
  __ SmiToInteger32(rcx, rcx);
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
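
// A note on the adaptor-frame test above, which recurs in the stubs below:
// the caller's frame is located through the saved rbp, and the slot at
// StandardFrameConstants::kContextOffset is compared against
// Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR). Only arguments adaptor frames
// store this sentinel smi where a regular frame keeps its context, so one
// compare is enough to tell the two frame kinds apart.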


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).

  Factory* factory = isolate()->factory();

  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(rcx, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame.  Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
  __ cmpp(rbx, rcx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, rcx);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
  // Get the arguments boilerplate from the current native context into rdi.
  Label has_mapped_parameters, copy;
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX;
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
  __ jmp(&copy, Label::kNear);

  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
  __ bind(&copy);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (untagged)
  // rdi = address of boilerplate object (tagged)
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ movp(rdx, FieldOperand(rdi, i));
    __ movp(FieldOperand(rax, i), rdx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ movp(rdx, args.GetArgumentOperand(0));
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsCalleeIndex * kPointerSize),
          rdx);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: rcx is tagged from here on.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map.  If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count.  Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots.  They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, args.GetArgumentOperand(2));
  __ subp(r8, r9);
  __ Move(r11, factory->the_hole_value());
  __ movp(rdx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  // r9 = loop variable (tagged)
  // r8 = mapping index (tagged)
  // r11 = the hole value
  // rdx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
  __ SmiToInteger64(kScratchRegister, r9);
  __ movp(FieldOperand(rdx, kScratchRegister,
                       times_pointer_size,
                       kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, kScratchRegister,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r11);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ SmiTest(r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // rcx = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  __ movp(rdx, args.GetArgumentOperand(1));
  // Untag rcx for the loop below.
  __ SmiToInteger64(rcx, rcx);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, rcx);
  __ j(less, &arguments_loop, Label::kNear);

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  // rcx = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(args.GetArgumentOperand(2), rcx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kHiddenNewSloppyArguments, 3, 1);
}
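
// For reference, the single allocation done by the fast path above holds up
// to three consecutive objects (all pointers tagged):
//
//   rax -> sloppy arguments object  (Heap::kSloppyArgumentsObjectSize)
//          parameter map            (FixedArray header + 2 slots + one slot
//                                    per mapped parameter; slot 0 holds the
//                                    context, slot 1 the backing store; the
//                                    map is omitted when nothing is mapped)
//          backing store            (FixedArray header + argument count)
//
// The arguments object's elements pointer refers to the parameter map when
// one exists and directly to the backing store otherwise.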


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewSloppyArguments, 3, 1);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation.  Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testp(rcx, rcx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ leap(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addp(rcx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
  __ movp(rdi, Operand(rdi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ movp(rbx, FieldOperand(rdi, i));
    __ movp(FieldOperand(rax, i), rbx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                            Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ testp(rcx, rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ movp(rdx, args.GetArgumentOperand(1));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ leap(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);

  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  // Untag the length for the loop below.
  __ SmiToInteger64(rcx, rcx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movp(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
  __ addp(rdi, Immediate(kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  __ decp(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewStrictArguments, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code is turned off by a runtime
  // switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp.  Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or     number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first
  //     string.  Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte.  Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent.  Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
      external_string /* 8 */, check_underlying /* 5a */,
      not_seq_nor_cons /* 7 */, check_code /* E */,
      not_long_external /* 10 */;
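
// The dispatch below relies on the zero values of the string, sequential and
// two-byte tags (see the STATIC_ASSERTs next to each test): after masking the
// instance type with every property of interest, a single zero test answers
// "sequential two-byte string?", and re-masking without the encoding bit
// answers "sequential string?" the same way.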
  // (1) Sequential two byte?  If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ testb(rbx, Immediate(kStringRepresentationMask));
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding.  If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
  // r11: code
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
  // r11: code
  // All checks done.  Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
1500 __ LoadAddress(kScratchRegister, 1501 ExternalReference::isolate_address(isolate())); 1502 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), 1503 kScratchRegister); 1504 1505 // Argument 8: Indicate that this is a direct call from JavaScript. 1506 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), 1507 Immediate(1)); 1508 1509 // Argument 7: Start (high end) of backtracking stack memory area. 1510 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); 1511 __ movp(r9, Operand(kScratchRegister, 0)); 1512 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); 1513 __ addp(r9, Operand(kScratchRegister, 0)); 1514 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); 1515 1516 // Argument 6: Set the number of capture registers to zero to force global 1517 // regexps to behave as non-global. This does not affect non-global regexps. 1518 // Argument 6 is passed in r9 on Linux and on the stack on Windows. 1519#ifdef _WIN64 1520 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), 1521 Immediate(0)); 1522#else 1523 __ Set(r9, 0); 1524#endif 1525 1526 // Argument 5: static offsets vector buffer. 1527 __ LoadAddress( 1528 r8, ExternalReference::address_of_static_offsets_vector(isolate())); 1529 // Argument 5 is passed in r8 on Linux and on the stack on Windows. 1530#ifdef _WIN64 1531 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); 1532#endif 1533 1534 // rdi: subject string 1535 // rbx: previous index 1536 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); 1537 // r11: code 1538 // r14: slice offset 1539 // r15: original subject string 1540 1541 // Argument 2: Previous index. 1542 __ movp(arg_reg_2, rbx); 1543 1544 // Argument 4: End of string data 1545 // Argument 3: Start of string data 1546 Label setup_two_byte, setup_rest, got_length, length_not_from_slice; 1547 // Prepare start and end index of the input. 1548 // Load the length from the original sliced string if that is the case. 1549 __ addp(rbx, r14); 1550 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); 1551 __ addp(r14, arg_reg_3); // Using arg3 as scratch. 1552 1553 // rbx: start index of the input 1554 // r14: end index of the input 1555 // r15: original subject string 1556 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. 1557 __ j(zero, &setup_two_byte, Label::kNear); 1558 __ leap(arg_reg_4, 1559 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize)); 1560 __ leap(arg_reg_3, 1561 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize)); 1562 __ jmp(&setup_rest, Label::kNear); 1563 __ bind(&setup_two_byte); 1564 __ leap(arg_reg_4, 1565 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize)); 1566 __ leap(arg_reg_3, 1567 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); 1568 __ bind(&setup_rest); 1569 1570 // Argument 1: Original subject string. 1571 // The original subject string is kept in r15 because rdi may have been 1572 // rewired in the deferred external-string case (8) to point at the raw 1573 // character data, so that by now it merely looks like a sequential string 1574 // (see the register comments above). 1575 __ movp(arg_reg_1, r15); 1576 1577 // Locate the code entry and call it. 1578 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); 1579 __ call(r11); 1580 1581 __ LeaveApiExitFrame(true); 1582 1583 // Check the result.
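// Editor's note: per this era's regexp-macro-assembler.h (assumption),
// NativeRegExpMacroAssembler::Execute() returns
//   SUCCESS = 1, FAILURE = 0, EXCEPTION = -1, RETRY = -2,
// which is why the code below tests rax against 1 first, then EXCEPTION,
// then FAILURE, and sends anything else (RETRY) to the runtime.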
1584 Label success; 1585 Label exception; 1586 __ cmpl(rax, Immediate(1)); 1587 // We expect exactly one result since we force the called regexp to behave 1588 // as non-global. 1589 __ j(equal, &success, Label::kNear); 1590 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); 1591 __ j(equal, &exception); 1592 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); 1593 // If none of the above, it can only be retry. 1594 // Handle that in the runtime system. 1595 __ j(not_equal, &runtime); 1596 1597 // For failure return null. 1598 __ LoadRoot(rax, Heap::kNullValueRootIndex); 1599 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); 1600 1601 // Load RegExp data. 1602 __ bind(&success); 1603 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); 1604 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); 1605 __ SmiToInteger32(rax, 1606 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); 1607 // Calculate number of capture registers (number_of_captures + 1) * 2. 1608 __ leal(rdx, Operand(rax, rax, times_1, 2)); 1609 1610 // rdx: Number of capture registers 1611 // Check that the fourth argument is a JSArray object. 1612 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); 1613 __ JumpIfSmi(r15, &runtime); 1614 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister); 1615 __ j(not_equal, &runtime); 1616 // Check that the JSArray is in fast case. 1617 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset)); 1618 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset)); 1619 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); 1620 __ j(not_equal, &runtime); 1621 // Check that the last match info has space for the capture registers and the 1622 // additional information. Ensure no overflow in add. 1623 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); 1624 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); 1625 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead)); 1626 __ cmpl(rdx, rax); 1627 __ j(greater, &runtime); 1628 1629 // rbx: last_match_info backing store (FixedArray) 1630 // rdx: number of capture registers 1631 // Store the capture count. 1632 __ Integer32ToSmi(kScratchRegister, rdx); 1633 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), 1634 kScratchRegister); 1635 // Store last subject and last input. 1636 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); 1637 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); 1638 __ movp(rcx, rax); 1639 __ RecordWriteField(rbx, 1640 RegExpImpl::kLastSubjectOffset, 1641 rax, 1642 rdi, 1643 kDontSaveFPRegs); 1644 __ movp(rax, rcx); 1645 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 1646 __ RecordWriteField(rbx, 1647 RegExpImpl::kLastInputOffset, 1648 rax, 1649 rdi, 1650 kDontSaveFPRegs); 1651 1652 // Get the static offsets vector filled by the native regexp code. 1653 __ LoadAddress( 1654 rcx, ExternalReference::address_of_static_offsets_vector(isolate())); 1655 1656 // rbx: last_match_info backing store (FixedArray) 1657 // rcx: offsets vector 1658 // rdx: number of capture registers 1659 Label next_capture, done; 1660 // Capture register counter starts from number of capture registers and 1661 // counts down until wrapping after zero. 1662 __ bind(&next_capture); 1663 __ subp(rdx, Immediate(1)); 1664 __ j(negative, &done, Label::kNear); 1665 // Read the value from the static offsets vector buffer and make it a smi.
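// Editor's sketch: the offsets vector holds int32 pairs, one pair per
// capture register: [match_start, match_end, group1_start, group1_end, ...],
// with -1 for unmatched groups (assumed layout). E.g. /(a)(b)/ on "ab" has
// number_of_captures == 2, so rdx == (2 + 1) * 2 == 6 entries, and the loop
// below copies entries 5, 4, ..., 0 into the last match info as smis.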
1666 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); 1667 __ Integer32ToSmi(rdi, rdi); 1668 // Store the smi value in the last match info. 1669 __ movp(FieldOperand(rbx, 1670 rdx, 1671 times_pointer_size, 1672 RegExpImpl::kFirstCaptureOffset), 1673 rdi); 1674 __ jmp(&next_capture); 1675 __ bind(&done); 1676 1677 // Return last match info. 1678 __ movp(rax, r15); 1679 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); 1680 1681 __ bind(&exception); 1682 // Result must now be exception. If there is no pending exception, a stack 1683 // overflow (on the backtrack stack) was detected in RegExp code, but the 1684 // exception has not been created yet. Handle that in the runtime system. 1685 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 1686 ExternalReference pending_exception_address( 1687 Isolate::kPendingExceptionAddress, isolate()); 1688 Operand pending_exception_operand = 1689 masm->ExternalOperand(pending_exception_address, rbx); 1690 __ movp(rax, pending_exception_operand); 1691 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); 1692 __ cmpp(rax, rdx); 1693 __ j(equal, &runtime); 1694 __ movp(pending_exception_operand, rdx); 1695 1696 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); 1697 Label termination_exception; 1698 __ j(equal, &termination_exception, Label::kNear); 1699 __ Throw(rax); 1700 1701 __ bind(&termination_exception); 1702 __ ThrowUncatchable(rax); 1703 1704 // Do the runtime call to execute the regexp. 1705 __ bind(&runtime); 1706 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1); 1707 1708 // Deferred code for string handling. 1709 // (7) Not a long external string? If yes, go to (10). 1710 __ bind(&not_seq_nor_cons); 1711 // Compare flags are still set from (3). 1712 __ j(greater, &not_long_external, Label::kNear); // Go to (10). 1713 1714 // (8) External string. Short external strings have been ruled out. 1715 __ bind(&external_string); 1716 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 1717 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 1718 if (FLAG_debug_code) { 1719 // Assert that we do not have a cons or slice (indirect strings) here. 1720 // Sequential strings have already been ruled out. 1721 __ testb(rbx, Immediate(kIsIndirectStringMask)); 1722 __ Assert(zero, kExternalStringExpectedButNotFound); 1723 } 1724 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); 1725 // Move the pointer so that offset-wise, it looks like a sequential string. 1726 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); 1727 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 1728 STATIC_ASSERT(kTwoByteStringTag == 0); 1729 // (8a) Is the external string one byte? If yes, go to (6). 1730 __ testb(rbx, Immediate(kStringEncodingMask)); 1731 __ j(not_zero, &seq_one_byte_string); // Go to (6). 1732 1733 // rdi: subject string (flat two-byte) 1734 // rax: RegExp data (FixedArray) 1735 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). 1736 __ bind(&seq_two_byte_string); 1737 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); 1738 __ Set(rcx, 0); // Type is two byte. 1739 __ jmp(&check_code); // Go to (E). 1740 1741 // (10) Not a string or a short external string? If yes, bail out to runtime. 1742 __ bind(&not_long_external); 1743 // Catch non-string subject or short external string.
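// Editor's sketch: both remaining bail conditions fold into one bit test.
// kIsNotStringMask catches non-strings and kShortExternalStringMask catches
// short external strings, so (assuming this era's instance-type encoding)
//   if (type & (kIsNotStringMask | kShortExternalStringMask)) goto runtime;
// and whatever survives must be a sliced string, handled in (11).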
1744 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0); 1745 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); 1746 __ j(not_zero, &runtime); 1747 1748 // (11) Sliced string. Replace subject with parent. Go to (5a). 1749 // Load offset into r14 and replace subject string with parent. 1750 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); 1751 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); 1752 __ jmp(&check_underlying); 1753#endif // V8_INTERPRETED_REGEXP 1754} 1755 1756 1757static int NegativeComparisonResult(Condition cc) { 1758 ASSERT(cc != equal); 1759 ASSERT((cc == less) || (cc == less_equal) 1760 || (cc == greater) || (cc == greater_equal)); 1761 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 1762} 1763 1764 1765static void CheckInputType(MacroAssembler* masm, 1766 Register input, 1767 CompareIC::State expected, 1768 Label* fail) { 1769 Label ok; 1770 if (expected == CompareIC::SMI) { 1771 __ JumpIfNotSmi(input, fail); 1772 } else if (expected == CompareIC::NUMBER) { 1773 __ JumpIfSmi(input, &ok); 1774 __ CompareMap(input, masm->isolate()->factory()->heap_number_map()); 1775 __ j(not_equal, fail); 1776 } 1777 // We could be strict about internalized/non-internalized here, but as long as 1778 // hydrogen doesn't care, the stub doesn't have to care either. 1779 __ bind(&ok); 1780} 1781 1782 1783static void BranchIfNotInternalizedString(MacroAssembler* masm, 1784 Label* label, 1785 Register object, 1786 Register scratch) { 1787 __ JumpIfSmi(object, label); 1788 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset)); 1789 __ movzxbp(scratch, 1790 FieldOperand(scratch, Map::kInstanceTypeOffset)); 1791 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); 1792 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); 1793 __ j(not_zero, label); 1794} 1795 1796 1797void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { 1798 Label check_unequal_objects, done; 1799 Condition cc = GetCondition(); 1800 Factory* factory = isolate()->factory(); 1801 1802 Label miss; 1803 CheckInputType(masm, rdx, left_, &miss); 1804 CheckInputType(masm, rax, right_, &miss); 1805 1806 // Compare two smis. 1807 Label non_smi, smi_done; 1808 __ JumpIfNotBothSmi(rax, rdx, &non_smi); 1809 __ subp(rdx, rax); 1810 __ j(no_overflow, &smi_done); 1811 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. 1812 __ bind(&smi_done); 1813 __ movp(rax, rdx); 1814 __ ret(0); 1815 __ bind(&non_smi); 1816 1817 // The compare stub returns a positive, negative, or zero 64-bit integer 1818 // value in rax, corresponding to the result of comparing the two inputs. 1819 // NOTICE! This code is only reached after a smi-fast-case check, so 1820 // it is certain that at least one operand isn't a smi. 1821 1822 // Two identical objects are equal unless they are both NaN or undefined. 1823 { 1824 Label not_identical; 1825 __ cmpp(rax, rdx); 1826 __ j(not_equal, &not_identical, Label::kNear); 1827 1828 if (cc != equal) { 1829 // Check for undefined. undefined OP undefined is false even though 1830 // undefined == undefined. 1831 Label check_for_nan; 1832 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 1833 __ j(not_equal, &check_for_nan, Label::kNear); 1834 __ Set(rax, NegativeComparisonResult(cc)); 1835 __ ret(0); 1836 __ bind(&check_for_nan); 1837 } 1838 1839 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), 1840 // so we do the second best thing - test it ourselves.
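// Editor's note: identity is not enough for numbers because of NaN
// semantics; in JS, for example,
//   var x = NaN;  x == x;   // false
//   undefined < undefined;  // false, handled just above
// so an identical heap number still needs the NaN probe below.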
1841 Label heap_number; 1842 // If it's not a heap number, then return equal for the (in)equality operator. 1843 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 1844 factory->heap_number_map()); 1845 __ j(equal, &heap_number, Label::kNear); 1846 if (cc != equal) { 1847 // Call runtime on identical objects. Otherwise return equal. 1848 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 1849 __ j(above_equal, &not_identical, Label::kNear); 1850 } 1851 __ Set(rax, EQUAL); 1852 __ ret(0); 1853 1854 __ bind(&heap_number); 1855 // It is a heap number, so return equal if it's not NaN. 1856 // For NaN, return 1 for every condition except greater and 1857 // greater-equal. Return -1 for them, so the comparison yields 1858 // false for all conditions except not-equal. 1859 __ Set(rax, EQUAL); 1860 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 1861 __ ucomisd(xmm0, xmm0); 1862 __ setcc(parity_even, rax); 1863 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs. 1864 if (cc == greater_equal || cc == greater) { 1865 __ negp(rax); 1866 } 1867 __ ret(0); 1868 1869 __ bind(&not_identical); 1870 } 1871 1872 if (cc == equal) { // Both strict and non-strict. 1873 Label slow; // Fallthrough label. 1874 1875 // If we're doing a strict equality comparison, we don't have to do 1876 // type conversion, so we generate code to do fast comparison for objects 1877 // and oddballs. Non-smi numbers and strings still go through the usual 1878 // slow-case code. 1879 if (strict()) { 1880 // If either is a Smi (we know that not both are), then they can only 1881 // be equal if the other is a HeapNumber. If so, use the slow case. 1882 { 1883 Label not_smis; 1884 __ SelectNonSmi(rbx, rax, rdx, &not_smis); 1885 1886 // Check if the non-smi operand is a heap number. 1887 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), 1888 factory->heap_number_map()); 1889 // If heap number, handle it in the slow case. 1890 __ j(equal, &slow); 1891 // Return non-equal. ebx (the lower half of rbx) is not zero. 1892 __ movp(rax, rbx); 1893 __ ret(0); 1894 1895 __ bind(&not_smis); 1896 } 1897 1898 // If either operand is a JSObject or an oddball value, then they are not 1899 // equal since their pointers are different. 1900 // There is no test for undetectability in strict equality. 1901 1902 // If the first object is a JS object, we have done pointer comparison. 1903 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); 1904 Label first_non_object; 1905 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 1906 __ j(below, &first_non_object, Label::kNear); 1907 // Return non-zero (eax (not rax) is not zero) 1908 Label return_not_equal; 1909 STATIC_ASSERT(kHeapObjectTag != 0); 1910 __ bind(&return_not_equal); 1911 __ ret(0); 1912 1913 __ bind(&first_non_object); 1914 // Check for oddballs: true, false, null, undefined. 1915 __ CmpInstanceType(rcx, ODDBALL_TYPE); 1916 __ j(equal, &return_not_equal); 1917 1918 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx); 1919 __ j(above_equal, &return_not_equal); 1920 1921 // Check for oddballs: true, false, null, undefined. 1922 __ CmpInstanceType(rcx, ODDBALL_TYPE); 1923 __ j(equal, &return_not_equal); 1924 1925 // Fall through to the general case. 1926 } 1927 __ bind(&slow); 1928 } 1929 1930 // Generate the number comparison code. 1931 Label non_number_comparison; 1932 Label unordered; 1933 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); 1934 __ xorl(rax, rax); 1935 __ xorl(rcx, rcx); 1936 __ ucomisd(xmm0, xmm1); 1937 1938 // Don't base result on EFLAGS when a NaN is involved.
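// Editor's sketch of the flag-to-integer trick used below: after
// ucomisd(xmm0, xmm1),
//   setcc(above, rax)  -> rax = (left > right) ? 1 : 0
//   setcc(below, rcx)  -> rcx = (left < right) ? 1 : 0
//   rax - rcx          -> +1 (greater), 0 (equal), -1 (less)
// which matches the GREATER/EQUAL/LESS encoding the IC expects.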
1939 __ j(parity_even, &unordered, Label::kNear); 1940 // Return a result of -1, 0, or 1, based on EFLAGS. 1941 __ setcc(above, rax); 1942 __ setcc(below, rcx); 1943 __ subp(rax, rcx); 1944 __ ret(0); 1945 1946 // If one of the numbers was NaN, then the result is always false. 1947 // The cc is never not-equal. 1948 __ bind(&unordered); 1949 ASSERT(cc != not_equal); 1950 if (cc == less || cc == less_equal) { 1951 __ Set(rax, 1); 1952 } else { 1953 __ Set(rax, -1); 1954 } 1955 __ ret(0); 1956 1957 // The number comparison code did not provide a valid result. 1958 __ bind(&non_number_comparison); 1959 1960 // Fast negative check for internalized-to-internalized equality. 1961 Label check_for_strings; 1962 if (cc == equal) { 1963 BranchIfNotInternalizedString( 1964 masm, &check_for_strings, rax, kScratchRegister); 1965 BranchIfNotInternalizedString( 1966 masm, &check_for_strings, rdx, kScratchRegister); 1967 1968 // We've already checked for object identity, so if both operands are 1969 // internalized strings they aren't equal. Register eax (not rax) already 1970 // holds a non-zero value, which indicates not equal, so just return. 1971 __ ret(0); 1972 } 1973 1974 __ bind(&check_for_strings); 1975 1976 __ JumpIfNotBothSequentialAsciiStrings( 1977 rdx, rax, rcx, rbx, &check_unequal_objects); 1978 1979 // Inline comparison of ASCII strings. 1980 if (cc == equal) { 1981 StringCompareStub::GenerateFlatAsciiStringEquals(masm, 1982 rdx, 1983 rax, 1984 rcx, 1985 rbx); 1986 } else { 1987 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, 1988 rdx, 1989 rax, 1990 rcx, 1991 rbx, 1992 rdi, 1993 r8); 1994 } 1995 1996#ifdef DEBUG 1997 __ Abort(kUnexpectedFallThroughFromStringComparison); 1998#endif 1999 2000 __ bind(&check_unequal_objects); 2001 if (cc == equal && !strict()) { 2002 // Not strict equality. Objects are unequal if 2003 // they are both JSObjects and not undetectable, 2004 // and their pointers are different. 2005 Label not_both_objects, return_unequal; 2006 // At most one is a smi, so we can test for smi by adding the two. 2007 // A smi plus a heap object has the low bit set, a heap object plus 2008 // a heap object has the low bit clear. 2009 STATIC_ASSERT(kSmiTag == 0); 2010 STATIC_ASSERT(kSmiTagMask == 1); 2011 __ leap(rcx, Operand(rax, rdx, times_1, 0)); 2012 __ testb(rcx, Immediate(kSmiTagMask)); 2013 __ j(not_zero, &not_both_objects, Label::kNear); 2014 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx); 2015 __ j(below, &not_both_objects, Label::kNear); 2016 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx); 2017 __ j(below, &not_both_objects, Label::kNear); 2018 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), 2019 Immediate(1 << Map::kIsUndetectable)); 2020 __ j(zero, &return_unequal, Label::kNear); 2021 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 2022 Immediate(1 << Map::kIsUndetectable)); 2023 __ j(zero, &return_unequal, Label::kNear); 2024 // The objects are both undetectable, so they both compare as the value 2025 // undefined, and are equal. 2026 __ Set(rax, EQUAL); 2027 __ bind(&return_unequal); 2028 // Return non-equal by returning the non-zero object pointer in rax, 2029 // or return equal if we fell through to here. 2030 __ ret(0); 2031 __ bind(&not_both_objects); 2032 } 2033 2034 // Push arguments below the return address to prepare jump to builtin. 2035 __ PopReturnAddressTo(rcx); 2036 __ Push(rdx); 2037 __ Push(rax); 2038 2039 // Figure out which native to call and setup the arguments.
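// Editor's note on the dispatch below (builtin names as in this era;
// assumption):
//   cc == equal, strict()  -> Builtins::STRICT_EQUALS
//   cc == equal, !strict() -> Builtins::EQUALS
//   otherwise              -> Builtins::COMPARE, with
//     NegativeComparisonResult(cc) pushed as the result to use when the
//     comparison is undefined (e.g. a NaN operand).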
2040 Builtins::JavaScript builtin; 2041 if (cc == equal) { 2042 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; 2043 } else { 2044 builtin = Builtins::COMPARE; 2045 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); 2046 } 2047 2048 __ PushReturnAddressFrom(rcx); 2049 2050 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 2051 // tagged as a small integer. 2052 __ InvokeBuiltin(builtin, JUMP_FUNCTION); 2053 2054 __ bind(&miss); 2055 GenerateMiss(masm); 2056} 2057 2058 2059static void GenerateRecordCallTarget(MacroAssembler* masm) { 2060 // Cache the called function in a feedback vector slot. Cache states 2061 // are uninitialized, monomorphic (indicated by a JSFunction), and 2062 // megamorphic. 2063 // rax : number of arguments to the construct function 2064 // rbx : Feedback vector 2065 // rdx : slot in feedback vector (Smi) 2066 // rdi : the function to call 2067 Isolate* isolate = masm->isolate(); 2068 Label initialize, done, miss, megamorphic, not_array_function, 2069 done_no_smi_convert; 2070 2071 // Load the cache state into rcx. 2072 __ SmiToInteger32(rdx, rdx); 2073 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, 2074 FixedArray::kHeaderSize)); 2075 2076 // A monomorphic cache hit or an already megamorphic state: invoke the 2077 // function without changing the state. 2078 __ cmpp(rcx, rdi); 2079 __ j(equal, &done); 2080 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2081 __ j(equal, &done); 2082 2083 if (!FLAG_pretenuring_call_new) { 2084 // If we came here, we need to see if we are the array function. 2085 // If we didn't have a matching function, and we didn't find the megamorphic 2086 // sentinel, then we have in the slot either some other function or an 2087 // AllocationSite. Do a map check on the object in rcx. 2088 Handle<Map> allocation_site_map = 2089 masm->isolate()->factory()->allocation_site_map(); 2090 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); 2091 __ j(not_equal, &miss); 2092 2093 // Make sure the function is the Array() function. 2094 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); 2095 __ cmpp(rdi, rcx); 2096 __ j(not_equal, &megamorphic); 2097 __ jmp(&done); 2098 } 2099 2100 __ bind(&miss); 2101 2102 // A monomorphic miss (i.e., here the cache is not uninitialized) goes 2103 // megamorphic. 2104 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); 2105 __ j(equal, &initialize); 2106 // MegamorphicSentinel is an immortal immovable object (undefined) so no 2107 // write-barrier is needed. 2108 __ bind(&megamorphic); 2109 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 2110 TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2111 __ jmp(&done); 2112 2113 // An uninitialized cache is patched with the function or sentinel to 2114 // indicate the ElementsKind if function is the Array constructor. 2115 __ bind(&initialize); 2116 2117 if (!FLAG_pretenuring_call_new) { 2118 // Make sure the function is the Array() function. 2119 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); 2120 __ cmpp(rdi, rcx); 2121 __ j(not_equal, &not_array_function); 2122 2123 { 2124 FrameScope scope(masm, StackFrame::INTERNAL); 2125 2126 // Arguments register must be smi-tagged to call out.
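// Editor's note: on x64 in this era, a smi keeps the 32-bit payload in the
// upper half of the word (kSmiShift == 32; assumption), so e.g.
//   Integer32ToSmi: 5 -> 0x0000000500000000
// and the low bit stays 0 (kSmiTag), which is what lets the GC treat these
// pushed values safely while the stub calls out.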
2127 __ Integer32ToSmi(rax, rax); 2128 __ Push(rax); 2129 __ Push(rdi); 2130 __ Integer32ToSmi(rdx, rdx); 2131 __ Push(rdx); 2132 __ Push(rbx); 2133 2134 CreateAllocationSiteStub create_stub(isolate); 2135 __ CallStub(&create_stub); 2136 2137 __ Pop(rbx); 2138 __ Pop(rdx); 2139 __ Pop(rdi); 2140 __ Pop(rax); 2141 __ SmiToInteger32(rax, rax); 2142 } 2143 __ jmp(&done_no_smi_convert); 2144 2145 __ bind(&not_array_function); 2146 } 2147 2148 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 2149 rdi); 2150 2151 // We won't need rdx or rbx anymore, just save rdi. 2152 __ Push(rdi); 2153 __ Push(rbx); 2154 __ Push(rdx); 2155 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs, 2156 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 2157 __ Pop(rdx); 2158 __ Pop(rbx); 2159 __ Pop(rdi); 2160 2161 __ bind(&done); 2162 __ Integer32ToSmi(rdx, rdx); 2163 2164 __ bind(&done_no_smi_convert); 2165} 2166 2167 2168static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { 2169 // Do not transform the receiver for strict mode functions. 2170 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2171 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), 2172 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); 2173 __ j(not_equal, cont); 2174 2175 // Do not transform the receiver for natives. 2176 // SharedFunctionInfo is already loaded into rcx. 2177 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), 2178 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); 2179 __ j(not_equal, cont); 2180} 2181 2182 2183static void EmitSlowCase(Isolate* isolate, 2184 MacroAssembler* masm, 2185 StackArgumentsAccessor* args, 2186 int argc, 2187 Label* non_function) { 2188 // Check for function proxy. 2189 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2190 __ j(not_equal, non_function); 2191 __ PopReturnAddressTo(rcx); 2192 __ Push(rdi); // put proxy as additional argument under return address 2193 __ PushReturnAddressFrom(rcx); 2194 __ Set(rax, argc + 1); 2195 __ Set(rbx, 0); 2196 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); 2197 { 2198 Handle<Code> adaptor = 2199 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); 2200 __ jmp(adaptor, RelocInfo::CODE_TARGET); 2201 } 2202 2203 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead 2204 // of the original receiver from the call site). 2205 __ bind(non_function); 2206 __ movp(args->GetReceiverOperand(), rdi); 2207 __ Set(rax, argc); 2208 __ Set(rbx, 0); 2209 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); 2210 Handle<Code> adaptor = 2211 isolate->builtins()->ArgumentsAdaptorTrampoline(); 2212 __ Jump(adaptor, RelocInfo::CODE_TARGET); 2213} 2214 2215 2216static void EmitWrapCase(MacroAssembler* masm, 2217 StackArgumentsAccessor* args, 2218 Label* cont) { 2219 // Wrap the receiver and patch it back onto the stack. 2220 { FrameScope frame_scope(masm, StackFrame::INTERNAL); 2221 __ Push(rdi); 2222 __ Push(rax); 2223 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); 2224 __ Pop(rdi); 2225 } 2226 __ movp(args->GetReceiverOperand(), rax); 2227 __ jmp(cont); 2228} 2229 2230 2231static void CallFunctionNoFeedback(MacroAssembler* masm, 2232 int argc, bool needs_checks, 2233 bool call_as_method) { 2234 // rdi : the function to call 2235 2236 // If call_as_method is set, the receiver may still need to be wrapped in an object.
2237 Isolate* isolate = masm->isolate(); 2238 Label slow, non_function, wrap, cont; 2239 StackArgumentsAccessor args(rsp, argc); 2240 2241 if (needs_checks) { 2242 // Check that the function really is a JavaScript function. 2243 __ JumpIfSmi(rdi, &non_function); 2244 2245 // Go to the slow case if we do not have a function. 2246 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2247 __ j(not_equal, &slow); 2248 } 2249 2250 // Fast-case: Just invoke the function. 2251 ParameterCount actual(argc); 2252 2253 if (call_as_method) { 2254 if (needs_checks) { 2255 EmitContinueIfStrictOrNative(masm, &cont); 2256 } 2257 2258 // Load the receiver from the stack. 2259 __ movp(rax, args.GetReceiverOperand()); 2260 2261 if (needs_checks) { 2262 __ JumpIfSmi(rax, &wrap); 2263 2264 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 2265 __ j(below, &wrap); 2266 } else { 2267 __ jmp(&wrap); 2268 } 2269 2270 __ bind(&cont); 2271 } 2272 2273 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); 2274 2275 if (needs_checks) { 2276 // Slow-case: Non-function called. 2277 __ bind(&slow); 2278 EmitSlowCase(isolate, masm, &args, argc, &non_function); 2279 } 2280 2281 if (call_as_method) { 2282 __ bind(&wrap); 2283 EmitWrapCase(masm, &args, &cont); 2284 } 2285} 2286 2287 2288void CallFunctionStub::Generate(MacroAssembler* masm) { 2289 CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod()); 2290} 2291 2292 2293void CallConstructStub::Generate(MacroAssembler* masm) { 2294 // rax : number of arguments 2295 // rbx : feedback vector 2296 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback 2297 // vector (Smi) 2298 // rdi : constructor function 2299 Label slow, non_function_call; 2300 2301 // Check that function is not a smi. 2302 __ JumpIfSmi(rdi, &non_function_call); 2303 // Check that function is a JSFunction. 2304 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2305 __ j(not_equal, &slow); 2306 2307 if (RecordCallTarget()) { 2308 GenerateRecordCallTarget(masm); 2309 2310 __ SmiToInteger32(rdx, rdx); 2311 if (FLAG_pretenuring_call_new) { 2312 // Put the AllocationSite from the feedback vector into rbx. 2313 // By adding kPointerSize we encode that we know the AllocationSite 2314 // entry is at the feedback vector slot given by rdx + 1. 2315 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size, 2316 FixedArray::kHeaderSize + kPointerSize)); 2317 } else { 2318 Label feedback_register_initialized; 2319 // Put the AllocationSite from the feedback vector into rbx, or undefined. 2320 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size, 2321 FixedArray::kHeaderSize)); 2322 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex); 2323 __ j(equal, &feedback_register_initialized); 2324 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); 2325 __ bind(&feedback_register_initialized); 2326 } 2327 2328 __ AssertUndefinedOrAllocationSite(rbx); 2329 } 2330 2331 // Jump to the function-specific construct stub.
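// Editor's sketch of the load chain below (names as in this era's object
// layout; an illustration, not new behavior):
//   rcx = rdi->shared()                            // SharedFunctionInfo
//   rcx = rcx->construct_stub()                    // Code object
//   jmp(rcx + Code::kHeaderSize - kHeapObjectTag)  // first instruction
// FieldOperand() supplies the -kHeapObjectTag untagging in the leap below.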
2332 Register jmp_reg = rcx; 2333 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2334 __ movp(jmp_reg, FieldOperand(jmp_reg, 2335 SharedFunctionInfo::kConstructStubOffset)); 2336 __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); 2337 __ jmp(jmp_reg); 2338 2339 // rdi: called object 2340 // rax: number of arguments 2341 // rcx: object map 2342 Label do_call; 2343 __ bind(&slow); 2344 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); 2345 __ j(not_equal, &non_function_call); 2346 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); 2347 __ jmp(&do_call); 2348 2349 __ bind(&non_function_call); 2350 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); 2351 __ bind(&do_call); 2352 // Set expected number of arguments to zero (not changing rax). 2353 __ Set(rbx, 0); 2354 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), 2355 RelocInfo::CODE_TARGET); 2356} 2357 2358 2359static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { 2360 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 2361 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); 2362 __ movp(vector, FieldOperand(vector, 2363 SharedFunctionInfo::kFeedbackVectorOffset)); 2364} 2365 2366 2367void CallIC_ArrayStub::Generate(MacroAssembler* masm) { 2368 // rdi - function 2369 // rdx - slot id (as integer) 2370 Label miss; 2371 int argc = state_.arg_count(); 2372 ParameterCount actual(argc); 2373 2374 EmitLoadTypeFeedbackVector(masm, rbx); 2375 __ SmiToInteger32(rdx, rdx); 2376 2377 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); 2378 __ cmpq(rdi, rcx); 2379 __ j(not_equal, &miss); 2380 2381 __ movp(rax, Immediate(arg_count())); 2382 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, 2383 FixedArray::kHeaderSize)); 2384 // Verify that rcx contains an AllocationSite. 2385 Factory* factory = masm->isolate()->factory(); 2386 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), 2387 factory->allocation_site_map()); 2388 __ j(not_equal, &miss); 2389 2390 __ movp(rbx, rcx); 2391 ArrayConstructorStub stub(masm->isolate(), arg_count()); 2392 __ TailCallStub(&stub); 2393 2394 __ bind(&miss); 2395 GenerateMiss(masm, IC::kCallIC_Customization_Miss); 2396 2397 // The slow case; we need this no matter what to complete a call after a miss. 2398 CallFunctionNoFeedback(masm, 2399 arg_count(), 2400 true, 2401 CallAsMethod()); 2402 2403 // Unreachable. 2404 __ int3(); 2405} 2406 2407 2408void CallICStub::Generate(MacroAssembler* masm) { 2409 // rdi - function 2410 // rbx - vector 2411 // rdx - slot id 2412 Isolate* isolate = masm->isolate(); 2413 Label extra_checks_or_miss, slow_start; 2414 Label slow, non_function, wrap, cont; 2415 Label have_js_function; 2416 int argc = state_.arg_count(); 2417 StackArgumentsAccessor args(rsp, argc); 2418 ParameterCount actual(argc); 2419 2420 EmitLoadTypeFeedbackVector(masm, rbx); 2421 2422 // The checks. First, does rdi match the recorded monomorphic target? 2423 __ SmiToInteger32(rdx, rdx); 2424 __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size, 2425 FixedArray::kHeaderSize)); 2426 __ j(not_equal, &extra_checks_or_miss); 2427 2428 __ bind(&have_js_function); 2429 if (state_.CallAsMethod()) { 2430 EmitContinueIfStrictOrNative(masm, &cont); 2431 2432 // Load the receiver from the stack.
2433 __ movp(rax, args.GetReceiverOperand()); 2434 2435 __ JumpIfSmi(rax, &wrap); 2436 2437 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 2438 __ j(below, &wrap); 2439 2440 __ bind(&cont); 2441 } 2442 2443 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); 2444 2445 __ bind(&slow); 2446 EmitSlowCase(isolate, masm, &args, argc, &non_function); 2447 2448 if (state_.CallAsMethod()) { 2449 __ bind(&wrap); 2450 EmitWrapCase(masm, &args, &cont); 2451 } 2452 2453 __ bind(&extra_checks_or_miss); 2454 Label miss; 2455 2456 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, 2457 FixedArray::kHeaderSize)); 2458 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2459 __ j(equal, &slow_start); 2460 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); 2461 __ j(equal, &miss); 2462 2463 if (!FLAG_trace_ic) { 2464 // We are going megamorphic. If the feedback is a JSFunction, it is fine 2465 // to handle it here. More complex cases are dealt with in the runtime. 2466 __ AssertNotSmi(rcx); 2467 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); 2468 __ j(not_equal, &miss); 2469 __ Move(FieldOperand(rbx, rdx, times_pointer_size, 2470 FixedArray::kHeaderSize), 2471 TypeFeedbackInfo::MegamorphicSentinel(isolate)); 2472 __ jmp(&slow_start); 2473 } 2474 2475 // We are here because tracing is on or we are going monomorphic. 2476 __ bind(&miss); 2477 GenerateMiss(masm, IC::kCallIC_Miss); 2478 2479 // The slow case. 2480 __ bind(&slow_start); 2481 // Check that function is not a smi. 2482 __ JumpIfSmi(rdi, &non_function); 2483 // Check that function is a JSFunction. 2484 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2485 __ j(not_equal, &slow); 2486 __ jmp(&have_js_function); 2487 2488 // Unreachable. 2489 __ int3(); 2490} 2491 2492 2493void CallICStub::GenerateMiss(MacroAssembler* masm, IC::UtilityId id) { 2494 // Get the receiver of the function from the stack; 1 ~ return address. 2495 __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize)); 2496 2497 { 2498 FrameScope scope(masm, StackFrame::INTERNAL); 2499 2500 // Push the receiver and the function and feedback info. 2501 __ Push(rcx); 2502 __ Push(rdi); 2503 __ Push(rbx); 2504 __ Integer32ToSmi(rdx, rdx); 2505 __ Push(rdx); 2506 2507 // Call the entry. 2508 ExternalReference miss = ExternalReference(IC_Utility(id), 2509 masm->isolate()); 2510 __ CallExternalReference(miss, 4); 2511 2512 // Move result to rdi and exit the internal frame. 2513 __ movp(rdi, rax); 2514 } 2515} 2516 2517 2518bool CEntryStub::NeedsImmovableCode() { 2519 return false; 2520} 2521 2522 2523void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2524 CEntryStub::GenerateAheadOfTime(isolate); 2525 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 2526 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 2527 // It is important that the store buffer overflow stubs are generated first.
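// Editor's note (assumption about the dependency): generating the stubs
// below can emit write barriers, and the record-write code calls the store
// buffer overflow stub, so that stub must already exist; hence the ordering
// constraint stated above.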
2528 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 2529 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 2530 BinaryOpICStub::GenerateAheadOfTime(isolate); 2531 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 2532} 2533 2534 2535void CodeStub::GenerateFPStubs(Isolate* isolate) { 2536} 2537 2538 2539void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 2540 CEntryStub stub(isolate, 1, kDontSaveFPRegs); 2541 stub.GetCode(); 2542 CEntryStub save_doubles(isolate, 1, kSaveFPRegs); 2543 save_doubles.GetCode(); 2544} 2545 2546 2547void CEntryStub::Generate(MacroAssembler* masm) { 2548 // rax: number of arguments including receiver 2549 // rbx: pointer to C function (C callee-saved) 2550 // rbp: frame pointer of calling JS frame (restored after C call) 2551 // rsp: stack pointer (restored after C call) 2552 // rsi: current context (restored) 2553 2554 ProfileEntryHookStub::MaybeCallEntryHook(masm); 2555 2556 // Enter the exit frame that transitions from JavaScript to C++. 2557#ifdef _WIN64 2558 int arg_stack_space = (result_size_ < 2 ? 2 : 4); 2559#else 2560 int arg_stack_space = 0; 2561#endif 2562 __ EnterExitFrame(arg_stack_space, save_doubles_); 2563 2564 // rbx: pointer to builtin function (C callee-saved). 2565 // rbp: frame pointer of exit frame (restored after C call). 2566 // rsp: stack pointer (restored after C call). 2567 // r14: number of arguments including receiver (C callee-saved). 2568 // r15: argv pointer (C callee-saved). 2569 2570 // Simple results returned in rax (both AMD64 and Win64 calling conventions). 2571 // Complex results must be written to address passed as first argument. 2572 // AMD64 calling convention: a struct of two pointers in rax+rdx 2573 2574 // Check stack alignment. 2575 if (FLAG_debug_code) { 2576 __ CheckStackAlignment(); 2577 } 2578 2579 // Call C function. 2580#ifdef _WIN64 2581 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. 2582 // Pass argv and argc as two parameters. The arguments object will 2583 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION(). 2584 if (result_size_ < 2) { 2585 // Pass a pointer to the Arguments object as the first argument. 2586 // Return result in single register (rax). 2587 __ movp(rcx, r14); // argc. 2588 __ movp(rdx, r15); // argv. 2589 __ Move(r8, ExternalReference::isolate_address(isolate())); 2590 } else { 2591 ASSERT_EQ(2, result_size_); 2592 // Pass a pointer to the result location as the first argument. 2593 __ leap(rcx, StackSpaceOperand(2)); 2594 // Pass a pointer to the Arguments object as the second argument. 2595 __ movp(rdx, r14); // argc. 2596 __ movp(r8, r15); // argv. 2597 __ Move(r9, ExternalReference::isolate_address(isolate())); 2598 } 2599 2600#else // _WIN64 2601 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9. 2602 __ movp(rdi, r14); // argc. 2603 __ movp(rsi, r15); // argv. 2604 __ Move(rdx, ExternalReference::isolate_address(isolate())); 2605#endif 2606 __ call(rbx); 2607 // Result is in rax - do not destroy this register! 2608 2609#ifdef _WIN64 2610 // If return value is on the stack, pop it to registers. 2611 if (result_size_ > 1) { 2612 ASSERT_EQ(2, result_size_); 2613 // Read result values stored on stack. Result is stored 2614 // above the four argument mirror slots and the two 2615 // Arguments object slots. 2616 __ movq(rax, Operand(rsp, 6 * kRegisterSize)); 2617 __ movq(rdx, Operand(rsp, 7 * kRegisterSize)); 2618 } 2619#endif 2620 2621 // Runtime functions should not return 'the hole'. 
Allowing it to escape may 2622 // lead to crashes in the IC code later. 2623 if (FLAG_debug_code) { 2624 Label okay; 2625 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); 2626 __ j(not_equal, &okay, Label::kNear); 2627 __ int3(); 2628 __ bind(&okay); 2629 } 2630 2631 // Check result for exception sentinel. 2632 Label exception_returned; 2633 __ CompareRoot(rax, Heap::kExceptionRootIndex); 2634 __ j(equal, &exception_returned); 2635 2636 ExternalReference pending_exception_address( 2637 Isolate::kPendingExceptionAddress, isolate()); 2638 2639 // Check that there is no pending exception, otherwise we 2640 // should have returned the exception sentinel. 2641 if (FLAG_debug_code) { 2642 Label okay; 2643 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); 2644 Operand pending_exception_operand = 2645 masm->ExternalOperand(pending_exception_address); 2646 __ cmpp(r14, pending_exception_operand); 2647 __ j(equal, &okay, Label::kNear); 2648 __ int3(); 2649 __ bind(&okay); 2650 } 2651 2652 // Exit the JavaScript to C++ exit frame. 2653 __ LeaveExitFrame(save_doubles_); 2654 __ ret(0); 2655 2656 // Handling of exception. 2657 __ bind(&exception_returned); 2658 2659 // Retrieve the pending exception. 2660 Operand pending_exception_operand = 2661 masm->ExternalOperand(pending_exception_address); 2662 __ movp(rax, pending_exception_operand); 2663 2664 // Clear the pending exception. 2665 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); 2666 __ movp(pending_exception_operand, rdx); 2667 2668 // Special handling of termination exceptions which are uncatchable 2669 // by javascript code. 2670 Label throw_termination_exception; 2671 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); 2672 __ j(equal, &throw_termination_exception); 2673 2674 // Handle normal exception. 2675 __ Throw(rax); 2676 2677 __ bind(&throw_termination_exception); 2678 __ ThrowUncatchable(rax); 2679} 2680 2681 2682void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { 2683 Label invoke, handler_entry, exit; 2684 Label not_outermost_js, not_outermost_js_2; 2685 2686 ProfileEntryHookStub::MaybeCallEntryHook(masm); 2687 2688 { // NOLINT. Scope block confuses linter. 2689 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); 2690 // Set up frame. 2691 __ pushq(rbp); 2692 __ movp(rbp, rsp); 2693 2694 // Push the stack frame type marker twice. 2695 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 2696 // Scratch register is neither callee-save, nor an argument register on any 2697 // platform. It's free to use at this point. 2698 // Cannot use smi-register for loading yet. 2699 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone()); 2700 __ Push(kScratchRegister); // context slot 2701 __ Push(kScratchRegister); // function slot 2702 // Save callee-saved registers (X64/X32/Win64 calling conventions). 2703 __ pushq(r12); 2704 __ pushq(r13); 2705 __ pushq(r14); 2706 __ pushq(r15); 2707#ifdef _WIN64 2708 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 2709 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 
2710#endif 2711 __ pushq(rbx); 2712 2713#ifdef _WIN64 2714 // On Win64 XMM6-XMM15 are callee-save 2715 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 2716 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6); 2717 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7); 2718 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8); 2719 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9); 2720 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10); 2721 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11); 2722 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12); 2723 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13); 2724 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); 2725 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); 2726#endif 2727 2728 // Set up the roots and smi constant registers. 2729 // Needs to be done before any further smi loads. 2730 __ InitializeSmiConstantRegister(); 2731 __ InitializeRootRegister(); 2732 } 2733 2734 // Save copies of the top frame descriptor on the stack. 2735 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); 2736 { 2737 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 2738 __ Push(c_entry_fp_operand); 2739 } 2740 2741 // If this is the outermost JS call, set js_entry_sp value. 2742 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); 2743 __ Load(rax, js_entry_sp); 2744 __ testp(rax, rax); 2745 __ j(not_zero, &not_outermost_js); 2746 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 2747 __ movp(rax, rbp); 2748 __ Store(js_entry_sp, rax); 2749 Label cont; 2750 __ jmp(&cont); 2751 __ bind(&not_outermost_js); 2752 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); 2753 __ bind(&cont); 2754 2755 // Jump to a faked try block that does the invoke, with a faked catch 2756 // block that sets the pending exception. 2757 __ jmp(&invoke); 2758 __ bind(&handler_entry); 2759 handler_offset_ = handler_entry.pos(); 2760 // Caught exception: Store result (exception) in the pending exception 2761 // field in the JSEnv and return a failure sentinel. 2762 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 2763 isolate()); 2764 __ Store(pending_exception, rax); 2765 __ LoadRoot(rax, Heap::kExceptionRootIndex); 2766 __ jmp(&exit); 2767 2768 // Invoke: Link this frame into the handler chain. There's only one 2769 // handler block in this code object, so its index is 0. 2770 __ bind(&invoke); 2771 __ PushTryHandler(StackHandler::JS_ENTRY, 0); 2772 2773 // Clear any pending exceptions. 2774 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); 2775 __ Store(pending_exception, rax); 2776 2777 // Fake a receiver (NULL). 2778 __ Push(Immediate(0)); // receiver 2779 2780 // Invoke the function by calling through JS entry trampoline builtin and 2781 // pop the faked function when we return. We load the address from an 2782 // external reference instead of inlining the call target address directly 2783 // in the code, because the builtin stubs may not have been generated yet 2784 // at the time this code is generated.
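// Editor's sketch of the indirection used below, in pseudo-C++ (an
// illustration under assumed layout, not the literal API):
//   Code* trampoline = *slot_behind(entry);       // read current Code object
//   call(trampoline + Code::kHeaderSize - kHeapObjectTag);  // first instr
// so the trampoline can be (re)generated without patching this stub.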
2785 if (is_construct) { 2786 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 2787 isolate()); 2788 __ Load(rax, construct_entry); 2789 } else { 2790 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); 2791 __ Load(rax, entry); 2792 } 2793 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); 2794 __ call(kScratchRegister); 2795 2796 // Unlink this frame from the handler chain. 2797 __ PopTryHandler(); 2798 2799 __ bind(&exit); 2800 // Check if the current stack frame is marked as the outermost JS frame. 2801 __ Pop(rbx); 2802 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 2803 __ j(not_equal, &not_outermost_js_2); 2804 __ Move(kScratchRegister, js_entry_sp); 2805 __ movp(Operand(kScratchRegister, 0), Immediate(0)); 2806 __ bind(&not_outermost_js_2); 2807 2808 // Restore the top frame descriptor from the stack. 2809 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 2810 __ Pop(c_entry_fp_operand); 2811 } 2812 2813 // Restore callee-saved registers (X64 conventions). 2814#ifdef _WIN64 2815 // On Win64 XMM6-XMM15 are callee-save 2816 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0)); 2817 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1)); 2818 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2)); 2819 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3)); 2820 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4)); 2821 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5)); 2822 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6)); 2823 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7)); 2824 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8)); 2825 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9)); 2826 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 2827#endif 2828 2829 __ popq(rbx); 2830#ifdef _WIN64 2831 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI. 2832 __ popq(rsi); 2833 __ popq(rdi); 2834#endif 2835 __ popq(r15); 2836 __ popq(r14); 2837 __ popq(r13); 2838 __ popq(r12); 2839 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers 2840 2841 // Restore frame pointer and return. 2842 __ popq(rbp); 2843 __ ret(0); 2844} 2845 2846 2847void InstanceofStub::Generate(MacroAssembler* masm) { 2848 // Implements "value instanceof function" operator. 2849 // Expected input state with no inline cache: 2850 // rsp[0] : return address 2851 // rsp[8] : function pointer 2852 // rsp[16] : value 2853 // Expected input state with an inline one-element cache: 2854 // rsp[0] : return address 2855 // rsp[8] : offset from return address to location of inline cache 2856 // rsp[16] : function pointer 2857 // rsp[24] : value 2858 // Returns a bitwise zero to indicate that the value 2859 // is an instance of the function and anything else to 2860 // indicate that the value is not an instance. 2861 2862 static const int kOffsetToMapCheckValue = 2; 2863 static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14; 2864 // The last 4 bytes of the instruction sequence 2865 // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset)) 2866 // Move(kScratchRegister, Factory::the_hole_value()) 2867 // in front of the hole value address. 2868 static const unsigned int kWordBeforeMapCheckValue = 2869 kPointerSize == kInt64Size ?
0xBA49FF78 : 0xBA41FF78; 2870 // The last 4 bytes of the instruction sequence 2871 // __ j(not_equal, &cache_miss); 2872 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); 2873 // before the offset of the hole value in the root array. 2874 static const unsigned int kWordBeforeResultValue = 2875 kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106; 2876 // Only the inline check flag is supported on X64. 2877 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck()); 2878 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0; 2879 2880 // Get the object - go slow case if it's a smi. 2881 Label slow; 2882 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset, 2883 ARGUMENTS_DONT_CONTAIN_RECEIVER); 2884 __ movp(rax, args.GetArgumentOperand(0)); 2885 __ JumpIfSmi(rax, &slow); 2886 2887 // Check that the left hand is a JS object. Leave its map in rax. 2888 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); 2889 __ j(below, &slow); 2890 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); 2891 __ j(above, &slow); 2892 2893 // Get the prototype of the function. 2894 __ movp(rdx, args.GetArgumentOperand(1)); 2895 // rdx is function, rax is map. 2896 2897 // If there is a call site cache don't look in the global cache, but do the 2898 // real lookup and update the call site cache. 2899 if (!HasCallSiteInlineCheck()) { 2900 // Look up the function and the map in the instanceof cache. 2901 Label miss; 2902 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 2903 __ j(not_equal, &miss, Label::kNear); 2904 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); 2905 __ j(not_equal, &miss, Label::kNear); 2906 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 2907 __ ret(2 * kPointerSize); 2908 __ bind(&miss); 2909 } 2910 2911 __ TryGetFunctionPrototype(rdx, rbx, &slow, true); 2912 2913 // Check that the function prototype is a JS object. 2914 __ JumpIfSmi(rbx, &slow); 2915 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); 2916 __ j(below, &slow); 2917 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE); 2918 __ j(above, &slow); 2919 2920 // Register mapping: 2921 // rax is object map. 2922 // rdx is function. 2923 // rbx is function prototype. 2924 if (!HasCallSiteInlineCheck()) { 2925 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); 2926 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); 2927 } else { 2928 // Get return address and delta to inlined map check. 2929 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); 2930 __ subp(kScratchRegister, args.GetArgumentOperand(2)); 2931 if (FLAG_debug_code) { 2932 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); 2933 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); 2934 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); 2935 } 2936 __ movp(kScratchRegister, 2937 Operand(kScratchRegister, kOffsetToMapCheckValue)); 2938 __ movp(Operand(kScratchRegister, 0), rax); 2939 } 2940 2941 __ movp(rcx, FieldOperand(rax, Map::kPrototypeOffset)); 2942 2943 // Loop through the prototype chain looking for the function prototype. 2944 Label loop, is_instance, is_not_instance; 2945 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); 2946 __ bind(&loop); 2947 __ cmpp(rcx, rbx); 2948 __ j(equal, &is_instance, Label::kNear); 2949 __ cmpp(rcx, kScratchRegister); 2950 // The code at is_not_instance assumes that kScratchRegister contains a 2951 // non-zero GCable value (the null object in this case). 
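// Editor's sketch of the loop below, in pseudo-C++ (illustration only):
//   for (obj = map->prototype(); ; obj = obj->map()->prototype()) {
//     if (obj == prototype) goto is_instance;      // found on the chain
//     if (obj == null)      goto is_not_instance;  // chain exhausted
//   }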
2952 __ j(equal, &is_not_instance, Label::kNear); 2953 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); 2954 __ movp(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); 2955 __ jmp(&loop); 2956 2957 __ bind(&is_instance); 2958 if (!HasCallSiteInlineCheck()) { 2959 __ xorl(rax, rax); 2960 // Store bitwise zero in the cache. This is a Smi in GC terms. 2961 STATIC_ASSERT(kSmiTag == 0); 2962 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); 2963 } else { 2964 // Store offset of true in the root array at the inline check site. 2965 int true_offset = 0x100 + 2966 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 2967 // Assert it is a 1-byte signed value. 2968 ASSERT(true_offset >= 0 && true_offset < 0x100); 2969 __ movl(rax, Immediate(true_offset)); 2970 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); 2971 __ subp(kScratchRegister, args.GetArgumentOperand(2)); 2972 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 2973 if (FLAG_debug_code) { 2974 __ movl(rax, Immediate(kWordBeforeResultValue)); 2975 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 2976 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); 2977 } 2978 __ Set(rax, 0); 2979 } 2980 __ ret((2 + extra_argument_offset) * kPointerSize); 2981 2982 __ bind(&is_not_instance); 2983 if (!HasCallSiteInlineCheck()) { 2984 // We have to store a non-zero value in the cache. 2985 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); 2986 } else { 2987 // Store offset of false in the root array at the inline check site. 2988 int false_offset = 0x100 + 2989 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; 2990 // Assert it is a 1-byte signed value. 2991 ASSERT(false_offset >= 0 && false_offset < 0x100); 2992 __ movl(rax, Immediate(false_offset)); 2993 __ movq(kScratchRegister, StackOperandForReturnAddress(0)); 2994 __ subp(kScratchRegister, args.GetArgumentOperand(2)); 2995 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); 2996 if (FLAG_debug_code) { 2997 __ movl(rax, Immediate(kWordBeforeResultValue)); 2998 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); 2999 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); 3000 } 3001 } 3002 __ ret((2 + extra_argument_offset) * kPointerSize); 3003 3004 // Slow-case: Go through the JavaScript implementation. 3005 __ bind(&slow); 3006 if (HasCallSiteInlineCheck()) { 3007 // Remove extra value from the stack. 3008 __ PopReturnAddressTo(rcx); 3009 __ Pop(rax); 3010 __ PushReturnAddressFrom(rcx); 3011 } 3012 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); 3013} 3014 3015 3016// Passing arguments in registers is not supported. 3017Register InstanceofStub::left() { return no_reg; } 3018 3019 3020Register InstanceofStub::right() { return no_reg; } 3021 3022 3023// ------------------------------------------------------------------------- 3024// StringCharCodeAtGenerator 3025 3026void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 3027 Label flat_string; 3028 Label ascii_string; 3029 Label got_char_code; 3030 Label sliced_string; 3031 3032 // If the receiver is a smi trigger the non-string case. 3033 __ JumpIfSmi(object_, receiver_not_string_); 3034 3035 // Fetch the instance type of the receiver into result register. 3036 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); 3037 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 3038 // If the receiver is not a string trigger the non-string case. 
  __ testb(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiToInteger32(index_, index_);

  StringCharLoadGenerator::Generate(
      masm, object_, index_, result_, &call_runtime_);

  __ Integer32ToSmi(result_, result_);
  __ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  Factory* factory = masm->isolate()->factory();
  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              factory->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
  }
  if (!index_.is(rax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ movp(index_, rax);
  }
  __ Pop(object_);
  // Reload the instance type.
  __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code for getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ Push(object_);
  __ Integer32ToSmi(index_, index_);
  __ Push(index_);
  __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
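  // (The single-character string cache is a FixedArray indexed by char code;
  // slots for codes that have not been cached yet hold undefined, which is
  // why the undefined check below falls through to the slow case.)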
  __ JumpIfNotSmi(code_, &slow_case_);
  __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
  __ j(above, &slow_case_);

  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
  SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
  __ movp(result_, FieldOperand(result_, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
  __ j(equal, &slow_case_);
  __ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ Push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(rax)) {
    __ movp(result_, rax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          String::Encoding encoding) {
  // Nothing to do for zero characters.
  Label done;
  __ testl(count, count);
  __ j(zero, &done, Label::kNear);

  // Make count the number of bytes to copy.
  if (encoding == String::TWO_BYTE_ENCODING) {
    STATIC_ASSERT(2 == sizeof(uc16));
    __ addl(count, count);
  }

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ movb(kScratchRegister, Operand(src, 0));
  __ movb(Operand(dest, 0), kScratchRegister);
  __ incp(src);
  __ incp(dest);
  __ decl(count);
  __ j(not_zero, &loop);

  __ bind(&done);
}


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = (seed + character) + ((seed + character) << 10);
  __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
  __ SmiToInteger32(scratch, scratch);
  __ addl(scratch, character);
  __ movl(hash, scratch);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ addl(hash, character);
  // hash += hash << 10;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(10));
  __ addl(hash, scratch);
  // hash ^= hash >> 6;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(6));
  __ xorl(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ leal(hash, Operand(hash, hash, times_8, 0));
  // hash ^= hash >> 11;
  __ movl(scratch, hash);
  __ shrl(scratch, Immediate(11));
  __ xorl(hash, scratch);
  // hash += hash << 15;
  __ movl(scratch, hash);
  __ shll(scratch, Immediate(15));
  __ addl(hash, scratch);

  __ andl(hash, Immediate(String::kHashBitMask));

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero);
  __ Set(hash, StringHasher::kZeroHash);
  __ bind(&hash_not_zero);
}


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : to
  //  rsp[16] : from
  //  rsp[24] : string

  enum SubStringStubArgumentIndices {
    STRING_ARGUMENT_INDEX,
    FROM_ARGUMENT_INDEX,
    TO_ARGUMENT_INDEX,
    SUB_STRING_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);

  // Make sure first argument is a string.
  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
  STATIC_ASSERT(kSmiTag == 0);
  __ testl(rax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
  __ j(NegateCondition(is_string), &runtime);

  // rax: string
  // rbx: instance type
  // Calculate length of sub string using the smi values.
  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);

  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
  __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ SmiCompare(rcx, Smi::FromInt(1));
  __ j(equal, &single_char);

  __ SmiToInteger32(rcx, rcx);

  // rax: string
  // rbx: instance type
  // rcx: sub string length
  // rdx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into rdi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ testb(rbx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  __ testb(rbx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string. Fetch parent and correct start index by offset.
  __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
  // Update instance type.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string. Just move string to the correct register.
  __ movp(rdi, rax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // rdi: underlying subject string
    // rbx: instance type of underlying subject string
    // rdx: adjusted start index (smi)
    // rcx: length
    // If coming from the make_two_character_string path, the string
    // is too short to be sliced anyway.
    __ cmpp(rcx, Immediate(SlicedString::kMinLength));
    // Short slice. Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string. At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string. It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyway due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ testb(rbx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
    __ bind(&set_slice_header);
    __ Integer32ToSmi(rcx, rcx);
    __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
    __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
            Immediate(String::kEmptyHashField));
    __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
    __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // rdi: underlying subject string
  // rbx: instance type of underlying subject string
  // rdx: adjusted start index (smi)
  // rcx: length
  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label two_byte_sequential, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ testb(rbx, Immediate(kExternalStringTag));
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kShortExternalStringMask));
  __ j(not_zero, &runtime);
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(zero, &two_byte_sequential);

  // Allocate the result.
  __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  { // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  __ bind(&two_byte_sequential);
  // Allocate the result.
  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);

  // rax: result string
  // rcx: result string length
  { // Locate character of sub string start.
    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
                         SeqOneByteString::kHeaderSize - kHeapObjectTag));
  }
  // Locate first character of result.
  __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));

  // rax: result string
  // rcx: result length
  // rdi: first character of result
  // r14: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);

  __ bind(&single_char);
  // rax: string
  // rbx: instance type
  // rcx: sub string length (smi)
  // rdx: from index (smi)
  StringCharAtGenerator generator(
      rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal,
                                // In debug-code mode, SmiTest below might push
                                // the target label outside the near range.
                                Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
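  // (With this bias, e.g. for length == 3 the index takes the values -3, -2,
  // -1; the incq in the loop below sets the zero flag exactly when the last
  // character has been compared, so no separate bounds check is needed.)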
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : right string
  //  rsp[16] : left string

  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(0));  // left
  __ movp(rax, args.GetArgumentOperand(1));  // right

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ASCII strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, Immediate(2 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
}


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state_);
  __ TailCallStub(&stub);
}


void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
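    // (If the subtraction overflows, the sign bit of rdx is the opposite of
    // the true result's sign. Only the sign matters for the comparison, so
    // flipping all bits with notp, presumably chosen over negation because
    // negating the most negative value would itself overflow, yields a
    // correctly signed result.)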
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left_ == CompareIC::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right_ == CompareIC::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);  // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
                     CompareIC::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
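  // (The two instance types are OR-ed together below; since kInternalizedTag
  // and kStringTag are both zero, a single test against
  // kIsNotStringMask | kIsNotInternalizedMask checks both operands at once.)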
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::UNIQUE_NAME);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op_);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    ASSERT(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  if (equality) {
    StringCompareStub::GenerateFlatAsciiStringEquals(
        masm, left, right, tmp1, tmp2);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECT);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  ASSERT(GetCondition() == equal);
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ Cmp(rcx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ Cmp(rbx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
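    // (The operands are pushed twice below: the first pair survives the call
    // and is restored afterwards, while the second pair plus the op_ smi are
    // consumed as the three arguments of the miss handler. The handler
    // returns the rewritten stub's code object in rax, from which the entry
    // point is computed into rdi.)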
    ExternalReference miss =
        ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());

    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op_));
    __ CallExternalReference(miss, 3);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}


void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  ASSERT(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
                           miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
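// (Each dictionary entry occupies NameDictionary::kEntrySize == 3 pointers,
// which is why the probe index is tripled below via
// lea r1, [r1 + r1 * 2] before being used as an element offset.)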
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));
  ASSERT(!name.is(r0));
  ASSERT(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary_,
                             index_,
                             times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
                             &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
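  // (The two jumps assembled above are immediately overwritten with 2-byte
  // and 5-byte nops here, so a freshly generated stub falls through to the
  // store-buffer-only path. RecordWriteStub::Patch presumably rewrites these
  // bytes back into the jumps when incremental marking is activated.)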
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
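  // (regs_.address() still points at the just-written slot; the value loaded
  // here is what the compaction checks below inspect. During compaction, a
  // value sitting on an evacuation candidate page forces us to inform the
  // marker so the slot gets recorded, unless the object's page skips slot
  // recording.)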
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  //  clobbers rbx, rdx, rdi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(1));
  __ movp(rbx, args.GetArgumentOperand(0));
  __ movp(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Store into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  __ Push(rbx);
  __ Push(rcx);
  __ Push(rax);
  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ leap(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                            FixedArrayBase::kHeaderSize));
  __ movp(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
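  // (Smis are immediate values that the GC never moves, so the store below
  // can skip the write barrier that the fast_elements path needed.)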
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movp(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movp(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);

  __ Ret();
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store rdx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; upper bits need to be left alone.
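    // (ElementsKindBits live in the low bits of transition_info, as the
    // STATIC_ASSERT below checks, so adding the packed-to-holey delta as a
    // smi constant bumps the kind in place without touching the upper bits.)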
4655 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4656 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), 4657 Smi::FromInt(kFastElementsKindPackedToHoley)); 4658 4659 __ bind(&normal_sequence); 4660 int last_index = GetSequenceIndexFromFastElementsKind( 4661 TERMINAL_FAST_ELEMENTS_KIND); 4662 for (int i = 0; i <= last_index; ++i) { 4663 Label next; 4664 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4665 __ cmpl(rdx, Immediate(kind)); 4666 __ j(not_equal, &next); 4667 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); 4668 __ TailCallStub(&stub); 4669 __ bind(&next); 4670 } 4671 4672 // If we reached this point there is a problem. 4673 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4674 } else { 4675 UNREACHABLE(); 4676 } 4677} 4678 4679 4680template<class T> 4681static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 4682 int to_index = GetSequenceIndexFromFastElementsKind( 4683 TERMINAL_FAST_ELEMENTS_KIND); 4684 for (int i = 0; i <= to_index; ++i) { 4685 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4686 T stub(isolate, kind); 4687 stub.GetCode(); 4688 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 4689 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); 4690 stub1.GetCode(); 4691 } 4692 } 4693} 4694 4695 4696void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { 4697 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 4698 isolate); 4699 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 4700 isolate); 4701 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( 4702 isolate); 4703} 4704 4705 4706void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( 4707 Isolate* isolate) { 4708 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; 4709 for (int i = 0; i < 2; i++) { 4710 // For internal arrays we only need a few things 4711 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); 4712 stubh1.GetCode(); 4713 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); 4714 stubh2.GetCode(); 4715 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); 4716 stubh3.GetCode(); 4717 } 4718} 4719 4720 4721void ArrayConstructorStub::GenerateDispatchToArrayStub( 4722 MacroAssembler* masm, 4723 AllocationSiteOverrideMode mode) { 4724 if (argument_count_ == ANY) { 4725 Label not_zero_case, not_one_case; 4726 __ testp(rax, rax); 4727 __ j(not_zero, ¬_zero_case); 4728 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 4729 4730 __ bind(¬_zero_case); 4731 __ cmpl(rax, Immediate(1)); 4732 __ j(greater, ¬_one_case); 4733 CreateArrayDispatchOneArgument(masm, mode); 4734 4735 __ bind(¬_one_case); 4736 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4737 } else if (argument_count_ == NONE) { 4738 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 4739 } else if (argument_count_ == ONE) { 4740 CreateArrayDispatchOneArgument(masm, mode); 4741 } else if (argument_count_ == MORE_THAN_ONE) { 4742 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4743 } else { 4744 UNREACHABLE(); 4745 } 4746} 4747 4748 4749void ArrayConstructorStub::Generate(MacroAssembler* masm) { 4750 // ----------- S t a t e ------------- 4751 // -- rax : argc 4752 // -- rbx : AllocationSite or undefined 4753 // -- rdi : constructor 4754 // -- rsp[0] : return address 4755 // -- rsp[8] : last argument 4756 // 
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  Label no_info;
  // If the feedback vector is the undefined value, call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
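    // (Since kSmiTag == 0, a NULL pointer has its tag bits clear and looks
    // like a Smi, so the single Smi check below rules out both bad values.)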
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  --
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register return_address = rdi;
  Register context = rsi;

  int argc = ArgumentBits::decode(bit_field_);
  bool is_store = IsStoreBits::decode(bit_field_);
  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  __ PopReturnAddressTo(return_address);

  // Save the context.
  __ Push(context);
  // Load the context from the callee.
  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));

  // Push the callee.
  __ Push(callee);

  // Push the call data.
  __ Push(call_data);
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // Push the return value slot.
  __ Push(scratch);
  // Push the return value default.
  __ Push(scratch);
  // Push the isolate.
  __ Move(scratch,
          ExternalReference::isolate_address(isolate()));
  __ Push(scratch);
  // Push the holder.
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push the return address back on the stack.
  __ PushReturnAddressFrom(return_address);

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by the GC.
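  //
  // The four pointer-sized slots reserved below hold the FunctionCallbackInfo
  // fields, which is where kApiStackSpace == 4 comes from:
  //   StackSpaceOperand(0) : implicit_args_ (start of the FCA block)
  //   StackSpaceOperand(1) : values_ (address of the first JS argument)
  //   StackSpaceOperand(2) : length_ (argc)
  //   StackSpaceOperand(3) : is_construct_call_ (always 0 here)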
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  __ movp(StackSpaceOperand(1), scratch);  // FunctionCallbackInfo::values_.
  __ Set(StackSpaceOperand(2), argc);  // FunctionCallbackInfo::length_.
  // FunctionCallbackInfo::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg,
  // but not if it == arguments_arg.
  ASSERT(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(isolate());

  // Accessor for the FunctionCallbackInfo and the first JS argument.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  // Stores return the first JS argument (the value being stored).
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      is_store ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  __ CallApiFunctionAndReturn(
      api_function_address,
      thunk_ref,
      callback_arg,
      argc + FCA::kArgsLength + 1,
      return_value_operand,
      &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                                : return address
  //  -- rsp[8]                                : name
  //  -- rsp[16 .. (16 + kArgsLength * 8 - 1)] : PropertyCallbackArguments
  //                                             object
  //  -- ...
  //  -- r8                                    : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = r8;
  Register scratch = rax;

  // v8::Arguments::values_ and the handler for name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ leap(name_arg, Operand(rsp, kPCOnStackSize));

  __ PrepareCallApiFunction(kArgStackSpace);
  __ leap(scratch, Operand(name_arg, 1 * kPointerSize));

  // v8::PropertyAccessorInfo::args_.
  __ movp(StackSpaceOperand(0), scratch);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg,
  // but not if it == accessor_info_arg or name_arg.
  ASSERT(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // The name handler is counted as an argument.
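  // GetArgumentOperand counts down from the top of the argument area, while
  // kReturnValueOffset counts up from its base, so the index is flipped:
  // (kArgsLength - 1) - kReturnValueOffset selects the return-value slot.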
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              getter_arg,
                              kStackSpace,
                              return_value_operand,
                              NULL);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64