code-stubs-ia32.cc revision 109988c7ccb6f3fd1a58574fa3dfb88beaef6632
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ia32/code-stubs-ia32.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


// Fills in |descriptor| for an Array constructor stub: registers the
// kArrayConstructor runtime entry as the deopt handler and, for stubs with a
// variable argument count (constant_stack_parameter_count != 0), records eax
// as the register holding the actual argument count.
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


// Same as InitializeArrayConstructorDescriptor, but the deopt handler is the
// kInternalArrayConstructor runtime entry (no allocation site in ebx).
static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


// The six InitializeDescriptor overrides below differ only in the constant
// stack parameter count: 0 (no args), 1 (single arg), -1 (variable N args).

void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


// Generates the fallback path for a Hydrogen code stub: push the stub's
// register parameters inside a fresh internal frame and call the runtime
// |miss| handler, then return. The descriptor's last register parameter is
// expected to be eax (checked below).
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Spill all XMM registers to a stack-allocated save area; restored below.
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  // Single C argument: the isolate address.
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}


// Static helpers for loading/checking floating-point operands; shared by the
// stub generators in this file.
class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};


// Truncates the double stored at source()+offset() to a 32-bit integer in
// destination(). The double is decoded from its raw IEEE-754 mantissa and
// exponent words (with an x87 fisttp fast path when SSE3 is available)
// rather than via cvttsd2si, so out-of-range magnitudes are reduced by bit
// shifting instead of producing the indefinite-integer value.
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  // Pick the first of ebx/edx/edi that aliases neither the input nor the
  // final result register.
  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  // If the input lives on the stack (input_reg == esp) the pushes above
  // already shifted it, so we must not push a stashed exponent copy too.
  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  // Extract the unbiased exponent into result_reg.
  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);  // Pop the value loaded above; this path doesn't need x87.
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);  // Shift count > 31: truncated result is 0.
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    // Reassemble the high mantissa word and add the implicit leading 1 bit.
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    // 64-bit right shift by ecx: shrd shifts bits of result_reg into
    // scratch1, then each half is shifted by cl; the test against 32 selects
    // which half holds the answer.
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  // Sign bit clear (exponent word compares greater than 0) => keep the
  // positive value from scratch1.
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  // Smi case: untag, convert via fild from a stack slot, then retag.
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


// Computes base^exponent. Three entry conventions, selected by
// exponent_type(): ON_STACK (tagged base and exponent on the stack), TAGGED
// (tagged exponent in the descriptor register, double base in xmm2) and
// INTEGER (untagged int exponent). Fast paths cover +/-0.5 exponents,
// integer exponents (square-and-multiply) and an x87 fallback; everything
// else bails out to the runtime / C function.
void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    // Unpack the base: smi -> int -> double, or heap number -> double.
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // If the exponent is an exact integer, use the integer fast path below.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5 (as a single-precision constant, then
      // widened to double).
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry
      // flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case (0 - (-Infinity)).
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_exponent with -0.5 by subtracting 1 (double_result still
      // holds 1.0, double_scratch holds 0.5).
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry
      // flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();  // Reset the FPU before falling back to the runtime.
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent (square-and-multiply over the
  // bits of |scratch|).
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    // Box the double result in a new heap number and pop the two tagged
    // arguments on return.
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ ret(0);
  }
}


// Loads the "prototype" property of the receiver function, missing to the
// LOAD_IC builtin when the initial-map slot holds the hole.
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack.  Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


// Keyed load through an interceptor: for a non-negative smi key, tail-calls
// the kLoadElementWithInterceptor runtime entry; otherwise misses to the
// KEYED_LOAD_IC builtin.
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  // Re-push the return address above the two arguments.
  __ pop(scratch);
  __ push(receiver);  // receiver
  __ push(key);       // key
  __ push(scratch);   // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


// Keyed load of a character from a string receiver via StringCharAtGenerator;
// any miss (non-string, non-number index, out of range) goes to the
// KEYED_LOAD_IC builtin.
void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off runtime switch or
  // at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
763 __ mov(eax, Operand(esp, kJSRegExpOffset)); 764 STATIC_ASSERT(kSmiTag == 0); 765 __ JumpIfSmi(eax, &runtime); 766 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); 767 __ j(not_equal, &runtime); 768 769 // Check that the RegExp has been compiled (data contains a fixed array). 770 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); 771 if (FLAG_debug_code) { 772 __ test(ecx, Immediate(kSmiTagMask)); 773 __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected); 774 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx); 775 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); 776 } 777 778 // ecx: RegExp data (FixedArray) 779 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. 780 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset)); 781 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP))); 782 __ j(not_equal, &runtime); 783 784 // ecx: RegExp data (FixedArray) 785 // Check that the number of captures fit in the static offsets vector buffer. 786 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); 787 // Check (number_of_captures + 1) * 2 <= offsets vector size 788 // Or number_of_captures * 2 <= offsets vector size - 2 789 // Multiplying by 2 comes for free since edx is smi-tagged. 790 STATIC_ASSERT(kSmiTag == 0); 791 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); 792 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2); 793 __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2); 794 __ j(above, &runtime); 795 796 // Reset offset for possibly sliced string. 797 __ Move(edi, Immediate(0)); 798 __ mov(eax, Operand(esp, kSubjectOffset)); 799 __ JumpIfSmi(eax, &runtime); 800 __ mov(edx, eax); // Make a copy of the original subject string. 801 802 // eax: subject string 803 // edx: subject string 804 // ecx: RegExp data (FixedArray) 805 // Handle subject string according to its encoding and representation: 806 // (1) Sequential two byte? If yes, go to (9). 807 // (2) Sequential one byte? 
If yes, go to (5). 808 // (3) Sequential or cons? If not, go to (6). 809 // (4) Cons string. If the string is flat, replace subject with first string 810 // and go to (1). Otherwise bail out to runtime. 811 // (5) One byte sequential. Load regexp code for one byte. 812 // (E) Carry on. 813 /// [...] 814 815 // Deferred code at the end of the stub: 816 // (6) Long external string? If not, go to (10). 817 // (7) External string. Make it, offset-wise, look like a sequential string. 818 // (8) Is the external string one byte? If yes, go to (5). 819 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). 820 // (10) Short external string or not a string? If yes, bail out to runtime. 821 // (11) Sliced string. Replace subject with parent. Go to (1). 822 823 Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */, 824 external_string /* 7 */, check_underlying /* 1 */, 825 not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */; 826 827 __ bind(&check_underlying); 828 // (1) Sequential two byte? If yes, go to (9). 829 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 830 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 831 832 __ and_(ebx, kIsNotStringMask | 833 kStringRepresentationMask | 834 kStringEncodingMask | 835 kShortExternalStringMask); 836 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); 837 __ j(zero, &seq_two_byte_string); // Go to (9). 838 839 // (2) Sequential one byte? If yes, go to (5). 840 // Any other sequential string must be one byte. 841 __ and_(ebx, Immediate(kIsNotStringMask | 842 kStringRepresentationMask | 843 kShortExternalStringMask)); 844 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). 845 846 // (3) Sequential or cons? If not, go to (6). 847 // We check whether the subject string is a cons, since sequential strings 848 // have already been covered. 
849 STATIC_ASSERT(kConsStringTag < kExternalStringTag); 850 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); 851 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); 852 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag); 853 __ cmp(ebx, Immediate(kExternalStringTag)); 854 __ j(greater_equal, ¬_seq_nor_cons); // Go to (6). 855 856 // (4) Cons string. Check that it's flat. 857 // Replace subject with first string and reload instance type. 858 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string()); 859 __ j(not_equal, &runtime); 860 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); 861 __ jmp(&check_underlying); 862 863 // eax: sequential subject string (or look-alike, external string) 864 // edx: original subject string 865 // ecx: RegExp data (FixedArray) 866 // (5) One byte sequential. Load regexp code for one byte. 867 __ bind(&seq_one_byte_string); 868 // Load previous index and check range before edx is overwritten. We have 869 // to use edx instead of eax here because it might have been only made to 870 // look like a sequential string when it actually is an external string. 871 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); 872 __ JumpIfNotSmi(ebx, &runtime); 873 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); 874 __ j(above_equal, &runtime); 875 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset)); 876 __ Move(ecx, Immediate(1)); // Type is one byte. 877 878 // (E) Carry on. String handling is done. 879 __ bind(&check_code); 880 // edx: irregexp code 881 // Check that the irregexp code has been generated for the actual string 882 // encoding. If it has, the field contains a code object otherwise it contains 883 // a smi (code flushing support). 884 __ JumpIfSmi(edx, &runtime); 885 886 // eax: subject string 887 // ebx: previous index (smi) 888 // edx: code 889 // ecx: encoding of subject string (1 if one_byte, 0 if two_byte); 890 // All checks done. Now push arguments for native regexp code. 
891 Counters* counters = isolate()->counters(); 892 __ IncrementCounter(counters->regexp_entry_native(), 1); 893 894 // Isolates: note we add an additional parameter here (isolate pointer). 895 static const int kRegExpExecuteArguments = 9; 896 __ EnterApiExitFrame(kRegExpExecuteArguments); 897 898 // Argument 9: Pass current isolate address. 899 __ mov(Operand(esp, 8 * kPointerSize), 900 Immediate(ExternalReference::isolate_address(isolate()))); 901 902 // Argument 8: Indicate that this is a direct call from JavaScript. 903 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); 904 905 // Argument 7: Start (high end) of backtracking stack memory area. 906 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); 907 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); 908 __ mov(Operand(esp, 6 * kPointerSize), esi); 909 910 // Argument 6: Set the number of capture registers to zero to force global 911 // regexps to behave as non-global. This does not affect non-global regexps. 912 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0)); 913 914 // Argument 5: static offsets vector buffer. 915 __ mov(Operand(esp, 4 * kPointerSize), 916 Immediate(ExternalReference::address_of_static_offsets_vector( 917 isolate()))); 918 919 // Argument 2: Previous index. 920 __ SmiUntag(ebx); 921 __ mov(Operand(esp, 1 * kPointerSize), ebx); 922 923 // Argument 1: Original subject string. 924 // The original subject is in the previous stack frame. Therefore we have to 925 // use ebp, which points exactly to one pointer size below the previous esp. 926 // (Because creating a new stack frame pushes the previous ebp onto the stack 927 // and thereby moves up esp by one kPointerSize.) 
928 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize)); 929 __ mov(Operand(esp, 0 * kPointerSize), esi); 930 931 // esi: original subject string 932 // eax: underlying subject string 933 // ebx: previous index 934 // ecx: encoding of subject string (1 if one_byte 0 if two_byte); 935 // edx: code 936 // Argument 4: End of string data 937 // Argument 3: Start of string data 938 // Prepare start and end index of the input. 939 // Load the length from the original sliced string if that is the case. 940 __ mov(esi, FieldOperand(esi, String::kLengthOffset)); 941 __ add(esi, edi); // Calculate input end wrt offset. 942 __ SmiUntag(edi); 943 __ add(ebx, edi); // Calculate input start wrt offset. 944 945 // ebx: start index of the input string 946 // esi: end index of the input string 947 Label setup_two_byte, setup_rest; 948 __ test(ecx, ecx); 949 __ j(zero, &setup_two_byte, Label::kNear); 950 __ SmiUntag(esi); 951 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize)); 952 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 953 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize)); 954 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 955 __ jmp(&setup_rest, Label::kNear); 956 957 __ bind(&setup_two_byte); 958 STATIC_ASSERT(kSmiTag == 0); 959 STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2). 960 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); 961 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. 962 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); 963 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. 964 965 __ bind(&setup_rest); 966 967 // Locate the code entry and call it. 968 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag)); 969 __ call(edx); 970 971 // Drop arguments and come back to JS mode. 972 __ LeaveApiExitFrame(true); 973 974 // Check the result. 
975 Label success; 976 __ cmp(eax, 1); 977 // We expect exactly one result since we force the called regexp to behave 978 // as non-global. 979 __ j(equal, &success); 980 Label failure; 981 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); 982 __ j(equal, &failure); 983 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); 984 // If not exception it can only be retry. Handle that in the runtime system. 985 __ j(not_equal, &runtime); 986 // Result must now be exception. If there is no pending exception already a 987 // stack overflow (on the backtrack stack) was detected in RegExp code but 988 // haven't created the exception yet. Handle that in the runtime system. 989 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 990 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 991 isolate()); 992 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); 993 __ mov(eax, Operand::StaticVariable(pending_exception)); 994 __ cmp(edx, eax); 995 __ j(equal, &runtime); 996 997 // For exception, throw the exception again. 998 __ TailCallRuntime(Runtime::kRegExpExecReThrow); 999 1000 __ bind(&failure); 1001 // For failure to match, return null. 1002 __ mov(eax, factory->null_value()); 1003 __ ret(4 * kPointerSize); 1004 1005 // Load RegExp data. 1006 __ bind(&success); 1007 __ mov(eax, Operand(esp, kJSRegExpOffset)); 1008 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); 1009 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); 1010 // Calculate number of capture registers (number_of_captures + 1) * 2. 1011 STATIC_ASSERT(kSmiTag == 0); 1012 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); 1013 __ add(edx, Immediate(2)); // edx was a smi. 1014 1015 // edx: Number of capture registers 1016 // Load last_match_info which is still known to be a fast case JSArray. 1017 // Check that the fourth object is a JSArray object. 
1018 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); 1019 __ JumpIfSmi(eax, &runtime); 1020 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 1021 __ j(not_equal, &runtime); 1022 // Check that the JSArray is in fast case. 1023 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset)); 1024 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); 1025 __ cmp(eax, factory->fixed_array_map()); 1026 __ j(not_equal, &runtime); 1027 // Check that the last match info has space for the capture registers and the 1028 // additional information. 1029 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); 1030 __ SmiUntag(eax); 1031 __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead)); 1032 __ cmp(edx, eax); 1033 __ j(greater, &runtime); 1034 1035 // ebx: last_match_info backing store (FixedArray) 1036 // edx: number of capture registers 1037 // Store the capture count. 1038 __ SmiTag(edx); // Number of capture registers to smi. 1039 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx); 1040 __ SmiUntag(edx); // Number of capture registers back from smi. 1041 // Store last subject and last input. 1042 __ mov(eax, Operand(esp, kSubjectOffset)); 1043 __ mov(ecx, eax); 1044 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax); 1045 __ RecordWriteField(ebx, 1046 RegExpImpl::kLastSubjectOffset, 1047 eax, 1048 edi, 1049 kDontSaveFPRegs); 1050 __ mov(eax, ecx); 1051 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax); 1052 __ RecordWriteField(ebx, 1053 RegExpImpl::kLastInputOffset, 1054 eax, 1055 edi, 1056 kDontSaveFPRegs); 1057 1058 // Get the static offsets vector filled by the native regexp code. 
1059 ExternalReference address_of_static_offsets_vector = 1060 ExternalReference::address_of_static_offsets_vector(isolate()); 1061 __ mov(ecx, Immediate(address_of_static_offsets_vector)); 1062 1063 // ebx: last_match_info backing store (FixedArray) 1064 // ecx: offsets vector 1065 // edx: number of capture registers 1066 Label next_capture, done; 1067 // Capture register counter starts from number of capture registers and 1068 // counts down until wraping after zero. 1069 __ bind(&next_capture); 1070 __ sub(edx, Immediate(1)); 1071 __ j(negative, &done, Label::kNear); 1072 // Read the value from the static offsets vector buffer. 1073 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); 1074 __ SmiTag(edi); 1075 // Store the smi value in the last match info. 1076 __ mov(FieldOperand(ebx, 1077 edx, 1078 times_pointer_size, 1079 RegExpImpl::kFirstCaptureOffset), 1080 edi); 1081 __ jmp(&next_capture); 1082 __ bind(&done); 1083 1084 // Return last match info. 1085 __ mov(eax, Operand(esp, kLastMatchInfoOffset)); 1086 __ ret(4 * kPointerSize); 1087 1088 // Do the runtime call to execute the regexp. 1089 __ bind(&runtime); 1090 __ TailCallRuntime(Runtime::kRegExpExec); 1091 1092 // Deferred code for string handling. 1093 // (6) Long external string? If not, go to (10). 1094 __ bind(¬_seq_nor_cons); 1095 // Compare flags are still set from (3). 1096 __ j(greater, ¬_long_external, Label::kNear); // Go to (10). 1097 1098 // (7) External string. Short external strings have been ruled out. 1099 __ bind(&external_string); 1100 // Reload instance type. 1101 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 1102 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); 1103 if (FLAG_debug_code) { 1104 // Assert that we do not have a cons or slice (indirect strings) here. 1105 // Sequential strings have already been ruled out. 
1106 __ test_b(ebx, kIsIndirectStringMask); 1107 __ Assert(zero, kExternalStringExpectedButNotFound); 1108 } 1109 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset)); 1110 // Move the pointer so that offset-wise, it looks like a sequential string. 1111 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); 1112 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 1113 STATIC_ASSERT(kTwoByteStringTag == 0); 1114 // (8) Is the external string one byte? If yes, go to (5). 1115 __ test_b(ebx, kStringEncodingMask); 1116 __ j(not_zero, &seq_one_byte_string); // Go to (5). 1117 1118 // eax: sequential subject string (or look-alike, external string) 1119 // edx: original subject string 1120 // ecx: RegExp data (FixedArray) 1121 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). 1122 __ bind(&seq_two_byte_string); 1123 // Load previous index and check range before edx is overwritten. We have 1124 // to use edx instead of eax here because it might have been only made to 1125 // look like a sequential string when it actually is an external string. 1126 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); 1127 __ JumpIfNotSmi(ebx, &runtime); 1128 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); 1129 __ j(above_equal, &runtime); 1130 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset)); 1131 __ Move(ecx, Immediate(0)); // Type is two byte. 1132 __ jmp(&check_code); // Go to (E). 1133 1134 // (10) Not a string or a short external string? If yes, bail out to runtime. 1135 __ bind(¬_long_external); 1136 // Catch non-string subject or short external string. 1137 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0); 1138 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag)); 1139 __ j(not_zero, &runtime); 1140 1141 // (11) Sliced string. Replace subject with parent. Go to (1). 1142 // Load offset into edi and replace subject string with parent. 
  // (11) Sliced string (continued). Load the slice offset into edi and
  // replace the subject with its parent string, then restart the
  // sequential-string check from the top.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}


// Maps a relational condition to the comparison result that must be
// returned when the comparison is answered "negatively" (used for
// undefined/NaN operands): greater/greater_equal yield LESS, while
// less/less_equal yield GREATER. |cc| must not be equal.
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


// Jumps to |fail| if |input| does not satisfy the |expected| CompareICState:
// SMI requires a smi, NUMBER requires a smi or a heap number. All other
// states fall through without any check.
static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


// Jumps to |label| if |object| is a smi or is not an internalized string.
// Clobbers |scratch| (used to hold the map and then the instance type).
static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Internalized strings have both the string tag and the internalized tag
  // cleared, so a single mask test suffices.
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


// Generic comparison: compares edx (left) against eax (right) under the
// stub's condition and returns a smi-like integer result in eax
// (negative, zero, or positive). Inputs that do not match the recorded
// CompareICState go to the miss handler; cases the fast paths cannot
// decide fall back to the runtime.
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  // Bail out to the miss handler if either input violates the state this
  // stub was specialized for.
  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  // Fast path: if both operands are smis, the (untagged-compatible)
  // subtraction edx - eax directly yields a correctly-signed result.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      __ cmp(edx, isolate()->factory()->undefined_value());
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
      // Call runtime on identical JSObjects. Otherwise return equal.
      __ cmpb(ecx, static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(ecx, static_cast<uint8_t>(SYMBOL_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(ecx, static_cast<uint8_t>(SIMD128_VALUE_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
    }
    // Identical non-special operands compare equal.
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    // Branch-free select of the non-smi operand into ebx:
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);

  FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
  __ ucomisd(xmm0, xmm1);
  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Branch-free materialization of -1/0/1 from the ucomisd flags.
  __ mov(eax, 0);  // equal
  __ mov(ecx, Immediate(Smi::FromInt(1)));
  __ cmov(above, eax, ecx);
  __ mov(ecx, Immediate(Smi::FromInt(-1)));
  __ cmov(below, eax, ecx);
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call, Label::kNear);

    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(not_zero, &undetectable, Label::kNear);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax.
    __ ret(0);  // eax, edx were pushed

    __ bind(&undetectable);
    // Two undetectable objects compare equal under non-strict equality.
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal, Label::kNear);
    __ Move(eax, Immediate(EQUAL));
    __ ret(0);  // eax, edx were pushed
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ sub(eax, Immediate(isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Push arguments below the return address.
    __ pop(ecx);
    __ push(edx);
    __ push(eax);
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
    __ push(ecx);
    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


// Calls |stub| while preserving the volatile registers that describe the
// pending construct call, saving/restoring them around the call.
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);

    __ CallStub(stub);

    // Restore in reverse push order.
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor,
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done);

  // Any other function goes monomorphic via a WeakCell holding it.
  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);
  __ bind(&done);
}


// Construct-call stub: records the call target in the feedback vector and
// tail-calls the function-specific construct stub, or the generic Construct
// builtin for non-JSFunction callees.
void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  // Non-JSFunction callee: let the generic Construct builtin deal with it.
  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}


// Handles a call-IC hit on an AllocationSite feedback entry: verifies the
// callee is the Array function, bumps the call count, and tail-calls the
// ArrayConstructorStub. Jumps to |miss| if the callee is not Array().
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  __ mov(eax, arg_count());
  // Reload ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);

  // Unreachable.
}


// Call IC dispatch: checks the feedback slot for a monomorphic target and
// transitions the slot through uninitialized -> monomorphic -> megamorphic
// states as needed before invoking the callee.
void CallICStub::Generate(MacroAssembler* masm) {
  // edi - function
  // edx - slot id
  // ebx - vector
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function;
  int argc = arg_count();
  ParameterCount actual(argc);

  // The checks. First, does edi match the recorded monomorphic target?
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(edi, &extra_checks_or_miss);

  // Increment the call count for monomorphic function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  // Monomorphic JSFunction path: dispatch through the CallFunction builtin.
  __ bind(&call_function);
  __ Set(eax, argc);
  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(ecx);
  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));

  // Megamorphic path: dispatch through the generic Call builtin.
  __ bind(&call);
  __ Set(eax, argc);
  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(edi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
  __ cmp(ecx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Initialize the call counter.
  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));

  // Store the function. Use a stub since we need a frame for allocation.
1749 // ebx - vector 1750 // edx - slot 1751 // edi - function 1752 { 1753 FrameScope scope(masm, StackFrame::INTERNAL); 1754 CreateWeakCellStub create_stub(isolate); 1755 __ push(edi); 1756 __ CallStub(&create_stub); 1757 __ pop(edi); 1758 } 1759 1760 __ jmp(&call_function); 1761 1762 // We are here because tracing is on or we encountered a MISS case we can't 1763 // handle here. 1764 __ bind(&miss); 1765 GenerateMiss(masm); 1766 1767 __ jmp(&call); 1768 1769 // Unreachable 1770 __ int3(); 1771} 1772 1773 1774void CallICStub::GenerateMiss(MacroAssembler* masm) { 1775 FrameScope scope(masm, StackFrame::INTERNAL); 1776 1777 // Push the function and feedback info. 1778 __ push(edi); 1779 __ push(ebx); 1780 __ push(edx); 1781 1782 // Call the entry. 1783 __ CallRuntime(Runtime::kCallIC_Miss); 1784 1785 // Move result to edi and exit the internal frame. 1786 __ mov(edi, eax); 1787} 1788 1789 1790bool CEntryStub::NeedsImmovableCode() { 1791 return false; 1792} 1793 1794 1795void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 1796 CEntryStub::GenerateAheadOfTime(isolate); 1797 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 1798 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 1799 // It is important that the store buffer overflow stubs are generated first. 1800 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 1801 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 1802 CreateWeakCellStub::GenerateAheadOfTime(isolate); 1803 BinaryOpICStub::GenerateAheadOfTime(isolate); 1804 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 1805 StoreFastElementStub::GenerateAheadOfTime(isolate); 1806 TypeofStub::GenerateAheadOfTime(isolate); 1807} 1808 1809 1810void CodeStub::GenerateFPStubs(Isolate* isolate) { 1811 // Generate if not already in cache. 
  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
  isolate->set_fp_stubs_generated(true);
}


// Pre-generates the canonical single-result, no-saved-doubles CEntry stub.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(arg_stack_space, save_doubles());
  }

  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  if (result_size() <= 2) {
    __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  } else {
    DCHECK_EQ(3, result_size());
    // Pass a pointer to the result location as the first argument.
    __ lea(eax, Operand(esp, 4 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), eax);
    __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  }
  __ call(ebx);

  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
#ifndef _WIN32
    // Restore the "hidden" argument on the stack which was popped by caller.
    __ sub(esp, Immediate(kPointerSize));
#endif
    // Read result values stored on stack. Result is stored above the arguments.
    __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
    __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
    __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
  }
  // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      Isolate::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_code_address(
      Isolate::kPendingHandlerCodeAddress, isolate());
  ExternalReference pending_handler_offset_address(
      Isolate::kPendingHandlerOffsetAddress, isolate());
  ExternalReference pending_handler_fp_address(
      Isolate::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      Isolate::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
  __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
  __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  __ jmp(edi);
}


void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  int marker = type();
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  // Record where the faked catch block starts so the stub descriptor can
  // expose it as the handler entry point.
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Clear any pending exceptions.
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate());
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(edx);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(ExternalReference(
      Isolate::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}


// Implements the fast path of the `instanceof` operator, including the
// one-entry global instanceof cache and a prototype-chain walk.
void InstanceOfStub::Generate(MacroAssembler* masm) {
  Register const object = edx;                       // Object (lhs).
  Register const function = eax;                     // Function (rhs).
  Register const object_map = ecx;                   // Map of {object}.
  Register const function_map = ebx;                 // Map of {function}.
  Register const function_prototype = function_map;  // Prototype of {function}.
  Register const scratch = edi;

  DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
  DCHECK(function.is(InstanceOfDescriptor::RightRegister()));

  // Check if {object} is a smi.
  Label object_is_smi;
  __ JumpIfSmi(object, &object_is_smi, Label::kNear);

  // Lookup the {function} and the {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Label fast_case, slow_case;
  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
  __ j(not_equal, &fast_case, Label::kNear);
  // Cache hit: return the previously computed boolean answer.
  __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // If {object} is a smi we can safely return false if {function} is a JS
  // function, otherwise we have to miss to the runtime and throw an exception.
  __ bind(&object_is_smi);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);
  __ LoadRoot(eax, Heap::kFalseValueRootIndex);
  __ ret(0);

  // Fast-case: The {function} must be a valid JSFunction.
  __ bind(&fast_case);
  __ JumpIfSmi(function, &slow_case);
  __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
  __ j(not_equal, &slow_case);

  // Ensure that {function} has an instance prototype.
  __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
            static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
  __ j(not_zero, &slow_case);

  // Get the "prototype" (or initial map) of the {function}.
  __ mov(function_prototype,
         FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  __ AssertNotSmi(function_prototype);

  // Resolve the prototype if the {function} has an initial map. Afterwards the
  // {function_prototype} will be either the JSReceiver prototype object or the
  // hole value, which means that no instances of the {function} were created so
  // far and hence we should return false.
  Label function_prototype_valid;
  Register const function_prototype_map = scratch;
  __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
  __ j(not_equal, &function_prototype_valid, Label::kNear);
  __ mov(function_prototype,
         FieldOperand(function_prototype, Map::kPrototypeOffset));
  __ bind(&function_prototype_valid);
  __ AssertNotSmi(function_prototype);

  // Update the global instanceof cache with the current {object} map and
  // {function}. The cached answer will be set when it is known below.
  __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);

  // Loop through the prototype chain looking for the {function} prototype.
  // Assume true, and change to false if not found.
  Label done, loop, fast_runtime_fallback;
  __ mov(eax, isolate()->factory()->true_value());
  __ bind(&loop);

  // Check if the object needs to be access checked.
  __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
            1 << Map::kIsAccessCheckNeeded);
  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
  // Check if the current object is a Proxy.
  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
  __ j(equal, &fast_runtime_fallback, Label::kNear);

  __ mov(object, FieldOperand(object_map, Map::kPrototypeOffset));
  __ cmp(object, function_prototype);
  __ j(equal, &done, Label::kNear);
  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
  __ cmp(object, isolate()->factory()->null_value());
  __ j(not_equal, &loop);
  // Hit the end of the chain (null) without finding the prototype.
  __ mov(eax, isolate()->factory()->false_value());

  __ bind(&done);
  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
  __ ret(0);

  // Found Proxy or access check needed: Call the runtime.
  __ bind(&fast_runtime_fallback);
  __ PopReturnAddressTo(scratch);
  __ Push(object);
  __ Push(function_prototype);
  __ PushReturnAddressFrom(scratch);
  // Invalidate the instanceof cache.
  __ Move(eax, Immediate(Smi::FromInt(0)));
  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheFunctionRootIndex);
  __ TailCallRuntime(Runtime::kHasInPrototypeChain);

  // Slow-case: Call the %InstanceOf runtime function.
  __ bind(&slow_case);
  __ PopReturnAddressTo(scratch);
  __ Push(object);
  __ Push(function);
  __ PushReturnAddressFrom(scratch);
  __ TailCallRuntime(Runtime::kInstanceOf);
}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  if (check_mode_ == RECEIVER_IS_UNKNOWN) {
    __ JumpIfSmi(object_, receiver_not_string_);

    // Fetch the instance type of the receiver into result register.
    __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
    __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
    // If the receiver is not a string trigger the non-string case.
    __ test(result_, Immediate(kIsNotStringMask));
    __ j(not_zero, receiver_not_string_);
  }

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, &index_not_smi_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  __ SmiUntag(index_);

  Factory* factory = masm->isolate()->factory();
  StringCharLoadGenerator::Generate(
      masm, factory, object_, index_, result_, &call_runtime_);

  // Result char code is re-tagged as a smi before falling through to exit.
  __ SmiTag(result_);
  __ bind(&exit_);
}


// Slow paths for GenerateFast above: converts a non-smi index via the runtime
// and handles strings too complex for the fast character load.
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, EmbedMode embed_mode,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              masm->isolate()->factory()->heap_number_map(),
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (embed_mode == PART_OF_IC_HANDLER) {
    // Preserve the IC's vector and slot registers across the runtime call.
    __ push(LoadWithVectorDescriptor::VectorRegister());
    __ push(LoadDescriptor::SlotRegister());
  }
  __ push(object_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
  } else {
    DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi);
  }
  if (!index_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(index_, eax);
  }
  __ pop(object_);
  if (embed_mode == PART_OF_IC_HANDLER) {
    __ pop(LoadDescriptor::SlotRegister());
    __ pop(LoadWithVectorDescriptor::VectorRegister());
  }
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(index_, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call runtime. We get here when the receiver is a string and the
  // index is a number, but the code of getting the actual character
  // is too complex (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ SmiTag(index_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAtRT);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
  // Single test rejects both non-smis and codes above the one-byte range.
  __ test(code_, Immediate(kSmiTagMask |
                           ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
  __ j(not_zero, &slow_case_);

  Factory* factory = masm->isolate()->factory();
  __ Move(result_, Immediate(factory->single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged one byte char code.
2308 __ mov(result_, FieldOperand(result_, 2309 code_, times_half_pointer_size, 2310 FixedArray::kHeaderSize)); 2311 __ cmp(result_, factory->undefined_value()); 2312 __ j(equal, &slow_case_); 2313 __ bind(&exit_); 2314} 2315 2316 2317void StringCharFromCodeGenerator::GenerateSlow( 2318 MacroAssembler* masm, 2319 const RuntimeCallHelper& call_helper) { 2320 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); 2321 2322 __ bind(&slow_case_); 2323 call_helper.BeforeCall(masm); 2324 __ push(code_); 2325 __ CallRuntime(Runtime::kStringCharFromCode); 2326 if (!result_.is(eax)) { 2327 __ mov(result_, eax); 2328 } 2329 call_helper.AfterCall(masm); 2330 __ jmp(&exit_); 2331 2332 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 2333} 2334 2335 2336void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, 2337 Register dest, 2338 Register src, 2339 Register count, 2340 Register scratch, 2341 String::Encoding encoding) { 2342 DCHECK(!scratch.is(dest)); 2343 DCHECK(!scratch.is(src)); 2344 DCHECK(!scratch.is(count)); 2345 2346 // Nothing to do for zero characters. 2347 Label done; 2348 __ test(count, count); 2349 __ j(zero, &done); 2350 2351 // Make count the number of bytes to copy. 2352 if (encoding == String::TWO_BYTE_ENCODING) { 2353 __ shl(count, 1); 2354 } 2355 2356 Label loop; 2357 __ bind(&loop); 2358 __ mov_b(scratch, Operand(src, 0)); 2359 __ mov_b(Operand(dest, 0), scratch); 2360 __ inc(src); 2361 __ inc(dest); 2362 __ dec(count); 2363 __ j(not_zero, &loop); 2364 2365 __ bind(&done); 2366} 2367 2368 2369void SubStringStub::Generate(MacroAssembler* masm) { 2370 Label runtime; 2371 2372 // Stack frame on entry. 2373 // esp[0]: return address 2374 // esp[4]: to 2375 // esp[8]: from 2376 // esp[12]: string 2377 2378 // Make sure first argument is a string. 
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ JumpIfNotSmi(ecx, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ JumpIfNotSmi(edx, &runtime);
  __ sub(ecx, edx);
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);
  __ bind(&not_original_string);

  // Length-1 substrings are handled by the single-character path at the end.
  Label single_char;
  __ cmp(ecx, Immediate(Smi::FromInt(1)));
  __ j(equal, &single_char);

  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into edi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ test(ebx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  Factory* factory = isolate()->factory();
  __ test(ebx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
         factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string. Fetch parent and adjust start index by offset.
  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string. Just move string to the expected register.
  __ mov(edi, eax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // edi: underlying subject string
    // ebx: instance type of underlying subject string
    // edx: adjusted start index (smi)
    // ecx: length (smi)
    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
    // Short slice. Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string. At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string. It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyways due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ test(ebx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
    __ bind(&set_slice_header);
    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
           Immediate(String::kEmptyHashField));
    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // edi: underlying subject string
  // ebx: instance type of underlying subject string
  // edx: adjusted start index (smi)
  // ecx: length (smi)
  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label two_byte_sequential, runtime_drop_two, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test_b(ebx, kExternalStringTag);
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(ebx, kShortExternalStringMask);
  __ j(not_zero, &runtime);
  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  // Stash away (adjusted) index and (underlying) string.
  __ push(edx);
  __ push(edi);
  __ SmiUntag(ecx);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ test_b(ebx, kStringEncodingMask);
  __ j(zero, &two_byte_sequential);

  // Sequential one byte string. Allocate the result.
  __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(edx);
  __ pop(ebx);
  __ SmiUntag(ebx);
  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edi: first character of result
  // edx: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&two_byte_sequential);
  // Sequential two-byte string. Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi,
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(edx);
  __ pop(ebx);
  // As from is a smi it is 2 times the value which matches the size of a two
  // byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edi: first character of result
  // edx: character of sub string start
  StringHelper::GenerateCopyCharacters(
      masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  // Drop pushed values on the stack before tail call.
  __ bind(&runtime_drop_two);
  __ Drop(2);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString);

  __ bind(&single_char);
  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
                                  &runtime, STRING_INDEX_IS_NUMBER,
                                  RECEIVER_IS_STRING);
  generator.GenerateFast(masm);
  __ ret(3 * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in eax.
  Label not_smi;
  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
  __ Ret();  // Smis are already numbers.
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();  // Heap numbers are already numbers.
  __ bind(&not_heap_number);

  Label not_string, slow_string;
  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
  // eax: object
  // edi: object map
  __ j(above_equal, &not_string, Label::kNear);
  // Check if string has a cached array index.
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &slow_string, Label::kNear);
  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);
  __ Ret();
  __ bind(&slow_string);
  __ pop(ecx);   // Pop return address.
  __ push(eax);  // Push argument.
  __ push(ecx);  // Push return address.
  __ TailCallRuntime(Runtime::kStringToNumber);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(edi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  // Oddballs (true/false/null/undefined) carry a precomputed number value.
  __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ pop(ecx);   // Pop return address.
  __ push(eax);  // Push argument.
  __ push(ecx);  // Push return address.
  __ TailCallRuntime(Runtime::kToNumber);
}


// Converts the value in eax to a valid array length: non-negative smis are
// returned unchanged, negative smis clamp to zero, everything else goes to
// the runtime.
void ToLengthStub::Generate(MacroAssembler* masm) {
  // The ToLength stub takes one argument in eax.
  Label not_smi, positive_smi;
  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, eax);
  __ j(greater_equal, &positive_smi, Label::kNear);
  __ xor_(eax, eax);  // Negative smi: clamp to +0.
  __ bind(&positive_smi);
  __ Ret();
  __ bind(&not_smi);

  __ pop(ecx);   // Pop return address.
  __ push(eax);  // Push argument.
  __ push(ecx);  // Push return address.
  __ TailCallRuntime(Runtime::kToLength);
}


// Converts the value in eax to a String: strings are returned unchanged,
// smis and heap numbers dispatch to NumberToStringStub, oddballs use their
// cached string representation, everything else goes to the runtime.
void ToStringStub::Generate(MacroAssembler* masm) {
  // The ToString stub takes one argument in eax.
  Label is_number;
  __ JumpIfSmi(eax, &is_number, Label::kNear);

  Label not_string;
  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
  // eax: receiver
  // edi: receiver map
  __ j(above_equal, &not_string, Label::kNear);
  __ Ret();  // Strings are returned as-is.
  __ bind(&not_string);

  Label not_heap_number;
  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(edi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ pop(ecx);   // Pop return address.
  __ push(eax);  // Push argument.
  __ push(ecx);  // Push return address.
  __ TailCallRuntime(Runtime::kToString);
}


// Converts the value in eax to a Name (string or symbol): names are returned
// unchanged, numbers dispatch to NumberToStringStub, oddballs use their cached
// string, everything else goes to the runtime.
void ToNameStub::Generate(MacroAssembler* masm) {
  // The ToName stub takes one argument in eax.
  Label is_number;
  __ JumpIfSmi(eax, &is_number, Label::kNear);

  Label not_name;
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
  // eax: receiver
  // edi: receiver map
  __ j(above, &not_name, Label::kNear);
  __ Ret();  // Names (strings and symbols) are returned as-is.
  __ bind(&not_name);

  Label not_heap_number;
  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ bind(&is_number);
  NumberToStringStub stub(isolate());
  __ TailCallStub(&stub);
  __ bind(&not_heap_number);

  Label not_oddball;
  __ CmpInstanceType(edi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
  __ Ret();
  __ bind(&not_oddball);

  __ pop(ecx);   // Pop return address.
  __ push(eax);  // Push argument.
  __ push(ecx);  // Push return address.
  __ TailCallRuntime(Runtime::kToName);
}


// Compares two flat one-byte strings for equality only.  Returns a smi
// NOT_EQUAL or EQUAL in eax.  Lengths are compared first so the character
// loop only runs for equal-length, non-empty operands.
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}


// Three-way comparison of two flat one-byte strings.  Returns a smi LESS,
// EQUAL or GREATER in eax.  Compares the common prefix character by
// character; if the prefixes match, the shorter string orders first.
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
                                  &result_not_equal, Label::kNear);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  // Flags still reflect length_delta: signed compare of lengths.
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Flags reflect the unsigned byte compare from the character loop.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}


// Emits a byte-wise comparison loop over |length| characters of two
// sequential one-byte strings; jumps to |chars_not_equal| on the first
// mismatch.  Clobbers left, right and length (length is untagged and
// negated to serve as the loop index).
void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ inc(index);
  __ j(not_zero, &loop);  // inc sets ZF when index reaches zero.
}


// Compares the two string arguments in edx (left) and eax (right) and
// returns a smi -1/0/1 in eax.  Identical references and flat one-byte
// strings are handled inline; everything else goes to the runtime.
void StringCompareStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : left string
  //  -- eax    : right string
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertString(edx);
  __ AssertString(eax);

  Label not_same;
  __ cmp(edx, eax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  __ Ret();

  __ bind(&not_same);

  // Check that both objects are sequential one-byte strings.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat one-byte strings.
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                  edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(eax);
  __ PushReturnAddressFrom(ecx);
  __ TailCallRuntime(Runtime::kStringCompare);
}


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : left
  //  -- eax    : right
  //  -- esp[0] : return address
  // -----------------------------------

  // Load ecx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate this
  // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ mov(ecx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


// Compare IC fast path for two boolean (oddball true/false) operands.
// Equality compares identity; ordered compares use the cached smi
// ToNumber values.  Falls through to GenerateMiss for anything else.
void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::BOOLEAN, state());
  Label miss;
  Label::Distance const miss_distance =
      masm->emit_debug_code() ? Label::kFar : Label::kNear;

  __ JumpIfSmi(edx, &miss, miss_distance);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ JumpIfSmi(eax, &miss, miss_distance);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
  if (!Token::IsEqualityOp(op())) {
    __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
    __ AssertSmi(eax);
    __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
    __ AssertSmi(edx);
    // Swap eax and edx via the stack (no scratch register available).
    __ push(eax);
    __ mov(eax, edx);
    __ pop(edx);
  }
  __ sub(eax, edx);
  __ Ret();

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for two smi operands.  Returns the difference in eax
// (sign gives the comparison result); overflow is corrected for ordered
// compares.
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, edx);
  } else {
    Label done;
    __ sub(edx, eax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for numeric operands (smis and heap numbers),
// compared as doubles in xmm0/xmm1.  NaN operands are handed to the
// generic stub; undefined is tolerated for ordered compares.
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(edx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(eax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(eax, &right_smi, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
  __ SmiUntag(ecx);
  __ Cvtsi2sd(xmm1, ecx);

  __ bind(&left);
  __ JumpIfSmi(edx, &left_smi, Label::kNear);
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ jmp(&done);
  __ bind(&left_smi);
  __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
  __ SmiUntag(ecx);
  __ Cvtsi2sd(xmm0, ecx);

  __ bind(&done);
  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ mov(eax, 0);  // equal
  __ mov(ecx, Immediate(Smi::FromInt(1)));
  __ cmov(above, eax, ecx);
  __ mov(ecx, Immediate(Smi::FromInt(-1)));
  __ cmov(below, eax, ecx);
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    // Ordered compares treat undefined like NaN: result is unordered.
    __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
    __ j(not_equal, &miss);
    __ JumpIfSmi(edx, &unordered);
    __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for two internalized strings: internalized strings
// are unique, so equality reduces to pointer identity.
void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ or_(tmp1, tmp2);
  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for two unique names (internalized strings and
// symbols); like internalized strings these compare by identity.
void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for two string operands.  Handles identity, the
// both-internalized shortcut (for equality), and flat one-byte strings
// inline; other cases go to the runtime.
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized. If they are, we're done
  // because we already know they are not identical.  But in the case of
  // non-equality compare, we still need to determine the order. We
  // also know they are both strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ or_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure eax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(eax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
                                                    tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for two JSReceiver operands (equality only):
// receivers compare by identity.
void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
  DCHECK_EQ(CompareICState::RECEIVER, state());
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);
  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(below, &miss, Label::kNear);

  DCHECK_EQ(equal, GetCondition());
  __ sub(eax, edx);  // Zero iff identical.
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC fast path for receivers with a known map (held weakly via a
// WeakCell).  Equality compares identity; ordered compares defer to the
// runtime with the negated condition as an extra argument.
void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ GetWeakValue(edi, cell);
  __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ j(not_equal, &miss, Label::kNear);

  if (Token::IsEqualityOp(op())) {
    __ sub(eax, edx);
    __ ret(0);
  } else {
    __ PopReturnAddressTo(ecx);
    __ Push(edx);
    __ Push(eax);
    __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


// Compare IC miss handler: calls the runtime to pick a more suitable stub
// and tail calls the rewritten code with the original operands restored.
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edx);  // Preserve edx and eax.
    __ push(eax);
    __ push(edx);  // And also use them as the arguments.
    __ push(eax);
    __ push(Immediate(Smi::FromInt(op())));
    __ CallRuntime(Runtime::kCompareIC_Miss);
    // Compute the entry point of the rewritten stub.
    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
    __ pop(eax);
    __ pop(edx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(edi);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    // index is a smi, so times_half_pointer_size yields a pointer scale.
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  // Inlined probes were inconclusive; fall back to the full probing stub.
  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}


// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  // r1 = capacity - 1, used as the probe mask.
  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  // Inlined probes were inconclusive; fall back to the full probing stub.
  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
                                POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  // Push capacity - 1 (the probe mask) so it survives scratch reuse;
  // note this shifts the stack offsets of hash/key by one slot below.
  __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));  // key's hash.
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));  // Mask with capacity - 1.

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));  // key.
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ mov(result(), Immediate(0));
    __ Drop(1);  // Pop the pushed mask.
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result(), Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result(), Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


// Pre-generates both flavors (with and without FP-register saving) of the
// store-buffer-overflow stub so they exist before GC can ever need them.
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
  stub.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
3534void RecordWriteStub::Generate(MacroAssembler* masm) { 3535 Label skip_to_incremental_noncompacting; 3536 Label skip_to_incremental_compacting; 3537 3538 // The first two instructions are generated with labels so as to get the 3539 // offset fixed up correctly by the bind(Label*) call. We patch it back and 3540 // forth between a compare instructions (a nop in this position) and the 3541 // real branch when we start and stop incremental heap marking. 3542 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); 3543 __ jmp(&skip_to_incremental_compacting, Label::kFar); 3544 3545 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 3546 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3547 MacroAssembler::kReturnAtEnd); 3548 } else { 3549 __ ret(0); 3550 } 3551 3552 __ bind(&skip_to_incremental_noncompacting); 3553 GenerateIncremental(masm, INCREMENTAL); 3554 3555 __ bind(&skip_to_incremental_compacting); 3556 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 3557 3558 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 3559 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 3560 masm->set_byte_at(0, kTwoByteNopInstruction); 3561 masm->set_byte_at(2, kFiveByteNopInstruction); 3562} 3563 3564 3565void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { 3566 regs_.Save(masm); 3567 3568 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 3569 Label dont_need_remembered_set; 3570 3571 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 3572 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. 3573 regs_.scratch0(), 3574 &dont_need_remembered_set); 3575 3576 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), 3577 &dont_need_remembered_set); 3578 3579 // First notify the incremental marker if necessary, then update the 3580 // remembered set. 
3581 CheckNeedsToInformIncrementalMarker( 3582 masm, 3583 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, 3584 mode); 3585 InformIncrementalMarker(masm); 3586 regs_.Restore(masm); 3587 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3588 MacroAssembler::kReturnAtEnd); 3589 3590 __ bind(&dont_need_remembered_set); 3591 } 3592 3593 CheckNeedsToInformIncrementalMarker( 3594 masm, 3595 kReturnOnNoNeedToInformIncrementalMarker, 3596 mode); 3597 InformIncrementalMarker(masm); 3598 regs_.Restore(masm); 3599 __ ret(0); 3600} 3601 3602 3603void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 3604 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode()); 3605 int argument_count = 3; 3606 __ PrepareCallCFunction(argument_count, regs_.scratch0()); 3607 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); 3608 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. 3609 __ mov(Operand(esp, 2 * kPointerSize), 3610 Immediate(ExternalReference::isolate_address(isolate()))); 3611 3612 AllowExternalCallThatCantCauseGC scope(masm); 3613 __ CallCFunction( 3614 ExternalReference::incremental_marking_record_write_function(isolate()), 3615 argument_count); 3616 3617 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode()); 3618} 3619 3620 3621void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 3622 MacroAssembler* masm, 3623 OnNoNeedToInformIncrementalMarker on_no_need, 3624 Mode mode) { 3625 Label object_is_black, need_incremental, need_incremental_pop_object; 3626 3627 __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); 3628 __ and_(regs_.scratch0(), regs_.object()); 3629 __ mov(regs_.scratch1(), 3630 Operand(regs_.scratch0(), 3631 MemoryChunk::kWriteBarrierCounterOffset)); 3632 __ sub(regs_.scratch1(), Immediate(1)); 3633 __ mov(Operand(regs_.scratch0(), 3634 MemoryChunk::kWriteBarrierCounterOffset), 3635 regs_.scratch1()); 3636 __ j(negative, &need_incremental); 3637 3638 // Let's look at the color 
of the object: If it is not black we don't have 3639 // to inform the incremental marker. 3640 __ JumpIfBlack(regs_.object(), 3641 regs_.scratch0(), 3642 regs_.scratch1(), 3643 &object_is_black, 3644 Label::kNear); 3645 3646 regs_.Restore(masm); 3647 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 3648 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3649 MacroAssembler::kReturnAtEnd); 3650 } else { 3651 __ ret(0); 3652 } 3653 3654 __ bind(&object_is_black); 3655 3656 // Get the value from the slot. 3657 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 3658 3659 if (mode == INCREMENTAL_COMPACTION) { 3660 Label ensure_not_white; 3661 3662 __ CheckPageFlag(regs_.scratch0(), // Contains value. 3663 regs_.scratch1(), // Scratch. 3664 MemoryChunk::kEvacuationCandidateMask, 3665 zero, 3666 &ensure_not_white, 3667 Label::kNear); 3668 3669 __ CheckPageFlag(regs_.object(), 3670 regs_.scratch1(), // Scratch. 3671 MemoryChunk::kSkipEvacuationSlotsRecordingMask, 3672 not_zero, 3673 &ensure_not_white, 3674 Label::kNear); 3675 3676 __ jmp(&need_incremental); 3677 3678 __ bind(&ensure_not_white); 3679 } 3680 3681 // We need an extra register for this, so we push the object register 3682 // temporarily. 3683 __ push(regs_.object()); 3684 __ JumpIfWhite(regs_.scratch0(), // The value. 3685 regs_.scratch1(), // Scratch. 3686 regs_.object(), // Scratch. 3687 &need_incremental_pop_object, Label::kNear); 3688 __ pop(regs_.object()); 3689 3690 regs_.Restore(masm); 3691 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 3692 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3693 MacroAssembler::kReturnAtEnd); 3694 } else { 3695 __ ret(0); 3696 } 3697 3698 __ bind(&need_incremental_pop_object); 3699 __ pop(regs_.object()); 3700 3701 __ bind(&need_incremental); 3702 3703 // Fall through when we need to inform the incremental marker. 
3704} 3705 3706 3707void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { 3708 CEntryStub ces(isolate(), 1, kSaveFPRegs); 3709 __ call(ces.GetCode(), RelocInfo::CODE_TARGET); 3710 int parameter_count_offset = 3711 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; 3712 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); 3713 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 3714 __ pop(ecx); 3715 int additional_offset = 3716 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; 3717 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); 3718 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. 3719} 3720 3721 3722void LoadICTrampolineStub::Generate(MacroAssembler* masm) { 3723 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); 3724 LoadICStub stub(isolate(), state()); 3725 stub.GenerateForTrampoline(masm); 3726} 3727 3728 3729void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { 3730 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); 3731 KeyedLoadICStub stub(isolate(), state()); 3732 stub.GenerateForTrampoline(masm); 3733} 3734 3735 3736static void HandleArrayCases(MacroAssembler* masm, Register receiver, 3737 Register key, Register vector, Register slot, 3738 Register feedback, bool is_polymorphic, 3739 Label* miss) { 3740 // feedback initially contains the feedback array 3741 Label next, next_loop, prepare_next; 3742 Label load_smi_map, compare_map; 3743 Label start_polymorphic; 3744 3745 __ push(receiver); 3746 __ push(vector); 3747 3748 Register receiver_map = receiver; 3749 Register cached_map = vector; 3750 3751 // Receiver might not be a heap object. 
3752 __ JumpIfSmi(receiver, &load_smi_map); 3753 __ mov(receiver_map, FieldOperand(receiver, 0)); 3754 __ bind(&compare_map); 3755 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0))); 3756 3757 // A named keyed load might have a 2 element array, all other cases can count 3758 // on an array with at least 2 {map, handler} pairs, so they can go right 3759 // into polymorphic array handling. 3760 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3761 __ j(not_equal, is_polymorphic ? &start_polymorphic : &next); 3762 3763 // found, now call handler. 3764 Register handler = feedback; 3765 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1))); 3766 __ pop(vector); 3767 __ pop(receiver); 3768 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 3769 __ jmp(handler); 3770 3771 if (!is_polymorphic) { 3772 __ bind(&next); 3773 __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset), 3774 Immediate(Smi::FromInt(2))); 3775 __ j(not_equal, &start_polymorphic); 3776 __ pop(vector); 3777 __ pop(receiver); 3778 __ jmp(miss); 3779 } 3780 3781 // Polymorphic, we have to loop from 2 to N 3782 __ bind(&start_polymorphic); 3783 __ push(key); 3784 Register counter = key; 3785 __ mov(counter, Immediate(Smi::FromInt(2))); 3786 __ bind(&next_loop); 3787 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, 3788 FixedArray::kHeaderSize)); 3789 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3790 __ j(not_equal, &prepare_next); 3791 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size, 3792 FixedArray::kHeaderSize + kPointerSize)); 3793 __ pop(key); 3794 __ pop(vector); 3795 __ pop(receiver); 3796 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 3797 __ jmp(handler); 3798 3799 __ bind(&prepare_next); 3800 __ add(counter, Immediate(Smi::FromInt(2))); 3801 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); 3802 __ j(less, 
&next_loop); 3803 3804 // We exhausted our array of map handler pairs. 3805 __ pop(key); 3806 __ pop(vector); 3807 __ pop(receiver); 3808 __ jmp(miss); 3809 3810 __ bind(&load_smi_map); 3811 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 3812 __ jmp(&compare_map); 3813} 3814 3815 3816static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, 3817 Register key, Register vector, Register slot, 3818 Register weak_cell, Label* miss) { 3819 // feedback initially contains the feedback array 3820 Label compare_smi_map; 3821 3822 // Move the weak map into the weak_cell register. 3823 Register ic_map = weak_cell; 3824 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset)); 3825 3826 // Receiver might not be a heap object. 3827 __ JumpIfSmi(receiver, &compare_smi_map); 3828 __ cmp(ic_map, FieldOperand(receiver, 0)); 3829 __ j(not_equal, miss); 3830 Register handler = weak_cell; 3831 __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size, 3832 FixedArray::kHeaderSize + kPointerSize)); 3833 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 3834 __ jmp(handler); 3835 3836 // In microbenchmarks, it made sense to unroll this code so that the call to 3837 // the handler is duplicated for a HeapObject receiver and a Smi receiver. 
3838 __ bind(&compare_smi_map); 3839 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex); 3840 __ j(not_equal, miss); 3841 __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size, 3842 FixedArray::kHeaderSize + kPointerSize)); 3843 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 3844 __ jmp(handler); 3845} 3846 3847 3848void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } 3849 3850 3851void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { 3852 GenerateImpl(masm, true); 3853} 3854 3855 3856void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3857 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // edx 3858 Register name = LoadWithVectorDescriptor::NameRegister(); // ecx 3859 Register vector = LoadWithVectorDescriptor::VectorRegister(); // ebx 3860 Register slot = LoadWithVectorDescriptor::SlotRegister(); // eax 3861 Register scratch = edi; 3862 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, 3863 FixedArray::kHeaderSize)); 3864 3865 // Is it a weak cell? 3866 Label try_array; 3867 Label not_array, smi_key, key_okay, miss; 3868 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex); 3869 __ j(not_equal, &try_array); 3870 HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss); 3871 3872 // Is it a fixed array? 
3873 __ bind(&try_array); 3874 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex); 3875 __ j(not_equal, ¬_array); 3876 HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss); 3877 3878 __ bind(¬_array); 3879 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex); 3880 __ j(not_equal, &miss); 3881 __ push(slot); 3882 __ push(vector); 3883 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( 3884 Code::ComputeHandlerFlags(Code::LOAD_IC)); 3885 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags, 3886 receiver, name, vector, scratch); 3887 __ pop(vector); 3888 __ pop(slot); 3889 3890 __ bind(&miss); 3891 LoadIC::GenerateMiss(masm); 3892} 3893 3894 3895void KeyedLoadICStub::Generate(MacroAssembler* masm) { 3896 GenerateImpl(masm, false); 3897} 3898 3899 3900void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { 3901 GenerateImpl(masm, true); 3902} 3903 3904 3905void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3906 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // edx 3907 Register key = LoadWithVectorDescriptor::NameRegister(); // ecx 3908 Register vector = LoadWithVectorDescriptor::VectorRegister(); // ebx 3909 Register slot = LoadWithVectorDescriptor::SlotRegister(); // eax 3910 Register feedback = edi; 3911 __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size, 3912 FixedArray::kHeaderSize)); 3913 // Is it a weak cell? 3914 Label try_array; 3915 Label not_array, smi_key, key_okay, miss; 3916 __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex); 3917 __ j(not_equal, &try_array); 3918 HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss); 3919 3920 __ bind(&try_array); 3921 // Is it a fixed array? 3922 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); 3923 __ j(not_equal, ¬_array); 3924 3925 // We have a polymorphic element handler. 
3926 Label polymorphic, try_poly_name; 3927 __ bind(&polymorphic); 3928 HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss); 3929 3930 __ bind(¬_array); 3931 // Is it generic? 3932 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); 3933 __ j(not_equal, &try_poly_name); 3934 Handle<Code> megamorphic_stub = 3935 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); 3936 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); 3937 3938 __ bind(&try_poly_name); 3939 // We might have a name in feedback, and a fixed array in the next slot. 3940 __ cmp(key, feedback); 3941 __ j(not_equal, &miss); 3942 // If the name comparison succeeded, we know we have a fixed array with 3943 // at least one map/handler pair. 3944 __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size, 3945 FixedArray::kHeaderSize + kPointerSize)); 3946 HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss); 3947 3948 __ bind(&miss); 3949 KeyedLoadIC::GenerateMiss(masm); 3950} 3951 3952 3953void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) { 3954 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); 3955 VectorStoreICStub stub(isolate(), state()); 3956 stub.GenerateForTrampoline(masm); 3957} 3958 3959 3960void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { 3961 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); 3962 VectorKeyedStoreICStub stub(isolate(), state()); 3963 stub.GenerateForTrampoline(masm); 3964} 3965 3966 3967void VectorStoreICStub::Generate(MacroAssembler* masm) { 3968 GenerateImpl(masm, false); 3969} 3970 3971 3972void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { 3973 GenerateImpl(masm, true); 3974} 3975 3976 3977// value is on the stack already. 
3978static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver, 3979 Register key, Register vector, 3980 Register slot, Register feedback, 3981 bool is_polymorphic, Label* miss) { 3982 // feedback initially contains the feedback array 3983 Label next, next_loop, prepare_next; 3984 Label load_smi_map, compare_map; 3985 Label start_polymorphic; 3986 Label pop_and_miss; 3987 ExternalReference virtual_register = 3988 ExternalReference::virtual_handler_register(masm->isolate()); 3989 3990 __ push(receiver); 3991 __ push(vector); 3992 3993 Register receiver_map = receiver; 3994 Register cached_map = vector; 3995 3996 // Receiver might not be a heap object. 3997 __ JumpIfSmi(receiver, &load_smi_map); 3998 __ mov(receiver_map, FieldOperand(receiver, 0)); 3999 __ bind(&compare_map); 4000 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0))); 4001 4002 // A named keyed store might have a 2 element array, all other cases can count 4003 // on an array with at least 2 {map, handler} pairs, so they can go right 4004 // into polymorphic array handling. 4005 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 4006 __ j(not_equal, &start_polymorphic); 4007 4008 // found, now call handler. 4009 Register handler = feedback; 4010 DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister())); 4011 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1))); 4012 __ pop(vector); 4013 __ pop(receiver); 4014 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 4015 __ mov(Operand::StaticVariable(virtual_register), handler); 4016 __ pop(handler); // Pop "value". 4017 __ jmp(Operand::StaticVariable(virtual_register)); 4018 4019 // Polymorphic, we have to loop from 2 to N 4020 __ bind(&start_polymorphic); 4021 __ push(key); 4022 Register counter = key; 4023 __ mov(counter, Immediate(Smi::FromInt(2))); 4024 4025 if (!is_polymorphic) { 4026 // If is_polymorphic is false, we may only have a two element array. 
4027 // Check against length now in that case. 4028 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); 4029 __ j(greater_equal, &pop_and_miss); 4030 } 4031 4032 __ bind(&next_loop); 4033 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, 4034 FixedArray::kHeaderSize)); 4035 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 4036 __ j(not_equal, &prepare_next); 4037 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size, 4038 FixedArray::kHeaderSize + kPointerSize)); 4039 __ lea(handler, FieldOperand(handler, Code::kHeaderSize)); 4040 __ pop(key); 4041 __ pop(vector); 4042 __ pop(receiver); 4043 __ mov(Operand::StaticVariable(virtual_register), handler); 4044 __ pop(handler); // Pop "value". 4045 __ jmp(Operand::StaticVariable(virtual_register)); 4046 4047 __ bind(&prepare_next); 4048 __ add(counter, Immediate(Smi::FromInt(2))); 4049 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); 4050 __ j(less, &next_loop); 4051 4052 // We exhausted our array of map handler pairs. 4053 __ bind(&pop_and_miss); 4054 __ pop(key); 4055 __ pop(vector); 4056 __ pop(receiver); 4057 __ jmp(miss); 4058 4059 __ bind(&load_smi_map); 4060 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 4061 __ jmp(&compare_map); 4062} 4063 4064 4065static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver, 4066 Register key, Register vector, 4067 Register slot, Register weak_cell, 4068 Label* miss) { 4069 // The store ic value is on the stack. 4070 DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister())); 4071 ExternalReference virtual_register = 4072 ExternalReference::virtual_handler_register(masm->isolate()); 4073 4074 // feedback initially contains the feedback array 4075 Label compare_smi_map; 4076 4077 // Move the weak map into the weak_cell register. 
4078 Register ic_map = weak_cell; 4079 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset)); 4080 4081 // Receiver might not be a heap object. 4082 __ JumpIfSmi(receiver, &compare_smi_map); 4083 __ cmp(ic_map, FieldOperand(receiver, 0)); 4084 __ j(not_equal, miss); 4085 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size, 4086 FixedArray::kHeaderSize + kPointerSize)); 4087 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize)); 4088 // Put the store ic value back in it's register. 4089 __ mov(Operand::StaticVariable(virtual_register), weak_cell); 4090 __ pop(weak_cell); // Pop "value". 4091 // jump to the handler. 4092 __ jmp(Operand::StaticVariable(virtual_register)); 4093 4094 // In microbenchmarks, it made sense to unroll this code so that the call to 4095 // the handler is duplicated for a HeapObject receiver and a Smi receiver. 4096 __ bind(&compare_smi_map); 4097 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex); 4098 __ j(not_equal, miss); 4099 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size, 4100 FixedArray::kHeaderSize + kPointerSize)); 4101 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize)); 4102 __ mov(Operand::StaticVariable(virtual_register), weak_cell); 4103 __ pop(weak_cell); // Pop "value". 4104 // jump to the handler. 
4105 __ jmp(Operand::StaticVariable(virtual_register)); 4106} 4107 4108 4109void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 4110 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx 4111 Register key = VectorStoreICDescriptor::NameRegister(); // ecx 4112 Register value = VectorStoreICDescriptor::ValueRegister(); // eax 4113 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx 4114 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi 4115 Label miss; 4116 4117 __ push(value); 4118 4119 Register scratch = value; 4120 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, 4121 FixedArray::kHeaderSize)); 4122 4123 // Is it a weak cell? 4124 Label try_array; 4125 Label not_array, smi_key, key_okay; 4126 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex); 4127 __ j(not_equal, &try_array); 4128 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss); 4129 4130 // Is it a fixed array? 
4131 __ bind(&try_array); 4132 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex); 4133 __ j(not_equal, ¬_array); 4134 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true, 4135 &miss); 4136 4137 __ bind(¬_array); 4138 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex); 4139 __ j(not_equal, &miss); 4140 4141 __ pop(value); 4142 __ push(slot); 4143 __ push(vector); 4144 Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags( 4145 Code::ComputeHandlerFlags(Code::STORE_IC)); 4146 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags, 4147 receiver, key, slot, no_reg); 4148 __ pop(vector); 4149 __ pop(slot); 4150 Label no_pop_miss; 4151 __ jmp(&no_pop_miss); 4152 4153 __ bind(&miss); 4154 __ pop(value); 4155 __ bind(&no_pop_miss); 4156 StoreIC::GenerateMiss(masm); 4157} 4158 4159 4160void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { 4161 GenerateImpl(masm, false); 4162} 4163 4164 4165void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { 4166 GenerateImpl(masm, true); 4167} 4168 4169 4170static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm, 4171 Register receiver, Register key, 4172 Register vector, Register slot, 4173 Register feedback, Label* miss) { 4174 // feedback initially contains the feedback array 4175 Label next, next_loop, prepare_next; 4176 Label load_smi_map, compare_map; 4177 Label transition_call; 4178 Label pop_and_miss; 4179 ExternalReference virtual_register = 4180 ExternalReference::virtual_handler_register(masm->isolate()); 4181 ExternalReference virtual_slot = 4182 ExternalReference::virtual_slot_register(masm->isolate()); 4183 4184 __ push(receiver); 4185 __ push(vector); 4186 4187 Register receiver_map = receiver; 4188 Register cached_map = vector; 4189 Register value = StoreDescriptor::ValueRegister(); 4190 4191 // Receiver might not be a heap object. 
4192 __ JumpIfSmi(receiver, &load_smi_map); 4193 __ mov(receiver_map, FieldOperand(receiver, 0)); 4194 __ bind(&compare_map); 4195 4196 // Polymorphic, we have to loop from 0 to N - 1 4197 __ push(key); 4198 // Current stack layout: 4199 // - esp[0] -- key 4200 // - esp[4] -- vector 4201 // - esp[8] -- receiver 4202 // - esp[12] -- value 4203 // - esp[16] -- return address 4204 // 4205 // Required stack layout for handler call: 4206 // - esp[0] -- return address 4207 // - receiver, key, value, vector, slot in registers. 4208 // - handler in virtual register. 4209 Register counter = key; 4210 __ mov(counter, Immediate(Smi::FromInt(0))); 4211 __ bind(&next_loop); 4212 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, 4213 FixedArray::kHeaderSize)); 4214 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 4215 __ j(not_equal, &prepare_next); 4216 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size, 4217 FixedArray::kHeaderSize + kPointerSize)); 4218 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex); 4219 __ j(not_equal, &transition_call); 4220 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size, 4221 FixedArray::kHeaderSize + 2 * kPointerSize)); 4222 __ pop(key); 4223 __ pop(vector); 4224 __ pop(receiver); 4225 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize)); 4226 __ mov(Operand::StaticVariable(virtual_register), feedback); 4227 __ pop(value); 4228 __ jmp(Operand::StaticVariable(virtual_register)); 4229 4230 __ bind(&transition_call); 4231 // Current stack layout: 4232 // - esp[0] -- key 4233 // - esp[4] -- vector 4234 // - esp[8] -- receiver 4235 // - esp[12] -- value 4236 // - esp[16] -- return address 4237 // 4238 // Required stack layout for handler call: 4239 // - esp[0] -- return address 4240 // - receiver, key, value, map, vector in registers. 4241 // - handler and slot in virtual registers. 
4242 __ mov(Operand::StaticVariable(virtual_slot), slot); 4243 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size, 4244 FixedArray::kHeaderSize + 2 * kPointerSize)); 4245 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize)); 4246 __ mov(Operand::StaticVariable(virtual_register), feedback); 4247 4248 __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 4249 // The weak cell may have been cleared. 4250 __ JumpIfSmi(cached_map, &pop_and_miss); 4251 DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister())); 4252 __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map); 4253 4254 // Pop key into place. 4255 __ pop(key); 4256 __ pop(vector); 4257 __ pop(receiver); 4258 __ pop(value); 4259 __ jmp(Operand::StaticVariable(virtual_register)); 4260 4261 __ bind(&prepare_next); 4262 __ add(counter, Immediate(Smi::FromInt(3))); 4263 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset)); 4264 __ j(less, &next_loop); 4265 4266 // We exhausted our array of map handler pairs. 4267 __ bind(&pop_and_miss); 4268 __ pop(key); 4269 __ pop(vector); 4270 __ pop(receiver); 4271 __ jmp(miss); 4272 4273 __ bind(&load_smi_map); 4274 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 4275 __ jmp(&compare_map); 4276} 4277 4278 4279void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 4280 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx 4281 Register key = VectorStoreICDescriptor::NameRegister(); // ecx 4282 Register value = VectorStoreICDescriptor::ValueRegister(); // eax 4283 Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx 4284 Register slot = VectorStoreICDescriptor::SlotRegister(); // edi 4285 Label miss; 4286 4287 __ push(value); 4288 4289 Register scratch = value; 4290 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, 4291 FixedArray::kHeaderSize)); 4292 4293 // Is it a weak cell? 
4294 Label try_array; 4295 Label not_array, smi_key, key_okay; 4296 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex); 4297 __ j(not_equal, &try_array); 4298 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss); 4299 4300 // Is it a fixed array? 4301 __ bind(&try_array); 4302 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex); 4303 __ j(not_equal, ¬_array); 4304 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch, 4305 &miss); 4306 4307 __ bind(¬_array); 4308 Label try_poly_name; 4309 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex); 4310 __ j(not_equal, &try_poly_name); 4311 4312 __ pop(value); 4313 4314 Handle<Code> megamorphic_stub = 4315 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); 4316 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); 4317 4318 __ bind(&try_poly_name); 4319 // We might have a name in feedback, and a fixed array in the next slot. 4320 __ cmp(key, scratch); 4321 __ j(not_equal, &miss); 4322 // If the name comparison succeeded, we know we have a fixed array with 4323 // at least one map/handler pair. 
4324 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size, 4325 FixedArray::kHeaderSize + kPointerSize)); 4326 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false, 4327 &miss); 4328 4329 __ bind(&miss); 4330 __ pop(value); 4331 KeyedStoreIC::GenerateMiss(masm); 4332} 4333 4334 4335void CallICTrampolineStub::Generate(MacroAssembler* masm) { 4336 __ EmitLoadTypeFeedbackVector(ebx); 4337 CallICStub stub(isolate(), state()); 4338 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); 4339} 4340 4341 4342void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 4343 if (masm->isolate()->function_entry_hook() != NULL) { 4344 ProfileEntryHookStub stub(masm->isolate()); 4345 masm->CallStub(&stub); 4346 } 4347} 4348 4349 4350void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 4351 // Save volatile registers. 4352 const int kNumSavedRegisters = 3; 4353 __ push(eax); 4354 __ push(ecx); 4355 __ push(edx); 4356 4357 // Calculate and push the original stack pointer. 4358 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); 4359 __ push(eax); 4360 4361 // Retrieve our return address and use it to calculate the calling 4362 // function's address. 4363 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); 4364 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); 4365 __ push(eax); 4366 4367 // Call the entry hook. 4368 DCHECK(isolate()->function_entry_hook() != NULL); 4369 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), 4370 RelocInfo::RUNTIME_ENTRY); 4371 __ add(esp, Immediate(2 * kPointerSize)); 4372 4373 // Restore ecx. 
4374 __ pop(edx); 4375 __ pop(ecx); 4376 __ pop(eax); 4377 4378 __ ret(0); 4379} 4380 4381 4382template<class T> 4383static void CreateArrayDispatch(MacroAssembler* masm, 4384 AllocationSiteOverrideMode mode) { 4385 if (mode == DISABLE_ALLOCATION_SITES) { 4386 T stub(masm->isolate(), 4387 GetInitialFastElementsKind(), 4388 mode); 4389 __ TailCallStub(&stub); 4390 } else if (mode == DONT_OVERRIDE) { 4391 int last_index = GetSequenceIndexFromFastElementsKind( 4392 TERMINAL_FAST_ELEMENTS_KIND); 4393 for (int i = 0; i <= last_index; ++i) { 4394 Label next; 4395 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4396 __ cmp(edx, kind); 4397 __ j(not_equal, &next); 4398 T stub(masm->isolate(), kind); 4399 __ TailCallStub(&stub); 4400 __ bind(&next); 4401 } 4402 4403 // If we reached this point there is a problem. 4404 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4405 } else { 4406 UNREACHABLE(); 4407 } 4408} 4409 4410 4411static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 4412 AllocationSiteOverrideMode mode) { 4413 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) 4414 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) 4415 // eax - number of arguments 4416 // edi - constructor? 4417 // esp[0] - return address 4418 // esp[4] - last argument 4419 Label normal_sequence; 4420 if (mode == DONT_OVERRIDE) { 4421 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 4422 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 4423 STATIC_ASSERT(FAST_ELEMENTS == 2); 4424 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 4425 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4); 4426 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); 4427 4428 // is the low bit set? If so, we are holey and that is good. 
4429 __ test_b(edx, 1); 4430 __ j(not_zero, &normal_sequence); 4431 } 4432 4433 // look at the first argument 4434 __ mov(ecx, Operand(esp, kPointerSize)); 4435 __ test(ecx, ecx); 4436 __ j(zero, &normal_sequence); 4437 4438 if (mode == DISABLE_ALLOCATION_SITES) { 4439 ElementsKind initial = GetInitialFastElementsKind(); 4440 ElementsKind holey_initial = GetHoleyElementsKind(initial); 4441 4442 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), 4443 holey_initial, 4444 DISABLE_ALLOCATION_SITES); 4445 __ TailCallStub(&stub_holey); 4446 4447 __ bind(&normal_sequence); 4448 ArraySingleArgumentConstructorStub stub(masm->isolate(), 4449 initial, 4450 DISABLE_ALLOCATION_SITES); 4451 __ TailCallStub(&stub); 4452 } else if (mode == DONT_OVERRIDE) { 4453 // We are going to create a holey array, but our kind is non-holey. 4454 // Fix kind and retry. 4455 __ inc(edx); 4456 4457 if (FLAG_debug_code) { 4458 Handle<Map> allocation_site_map = 4459 masm->isolate()->factory()->allocation_site_map(); 4460 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); 4461 __ Assert(equal, kExpectedAllocationSite); 4462 } 4463 4464 // Save the resulting elements kind in type info. We can't just store r3 4465 // in the AllocationSite::transition_info field because elements kind is 4466 // restricted to a portion of the field...upper bits need to be left alone. 
4467 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4468 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), 4469 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley))); 4470 4471 __ bind(&normal_sequence); 4472 int last_index = GetSequenceIndexFromFastElementsKind( 4473 TERMINAL_FAST_ELEMENTS_KIND); 4474 for (int i = 0; i <= last_index; ++i) { 4475 Label next; 4476 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4477 __ cmp(edx, kind); 4478 __ j(not_equal, &next); 4479 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); 4480 __ TailCallStub(&stub); 4481 __ bind(&next); 4482 } 4483 4484 // If we reached this point there is a problem. 4485 __ Abort(kUnexpectedElementsKindInArrayConstructor); 4486 } else { 4487 UNREACHABLE(); 4488 } 4489} 4490 4491 4492template<class T> 4493static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 4494 int to_index = GetSequenceIndexFromFastElementsKind( 4495 TERMINAL_FAST_ELEMENTS_KIND); 4496 for (int i = 0; i <= to_index; ++i) { 4497 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 4498 T stub(isolate, kind); 4499 stub.GetCode(); 4500 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 4501 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); 4502 stub1.GetCode(); 4503 } 4504 } 4505} 4506 4507 4508void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { 4509 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 4510 isolate); 4511 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 4512 isolate); 4513 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>( 4514 isolate); 4515} 4516 4517 4518void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime( 4519 Isolate* isolate) { 4520 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; 4521 for (int i = 0; i < 2; i++) { 4522 // For internal arrays we only need a few things 4523 
InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); 4524 stubh1.GetCode(); 4525 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); 4526 stubh2.GetCode(); 4527 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]); 4528 stubh3.GetCode(); 4529 } 4530} 4531 4532 4533void ArrayConstructorStub::GenerateDispatchToArrayStub( 4534 MacroAssembler* masm, 4535 AllocationSiteOverrideMode mode) { 4536 if (argument_count() == ANY) { 4537 Label not_zero_case, not_one_case; 4538 __ test(eax, eax); 4539 __ j(not_zero, ¬_zero_case); 4540 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 4541 4542 __ bind(¬_zero_case); 4543 __ cmp(eax, 1); 4544 __ j(greater, ¬_one_case); 4545 CreateArrayDispatchOneArgument(masm, mode); 4546 4547 __ bind(¬_one_case); 4548 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4549 } else if (argument_count() == NONE) { 4550 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 4551 } else if (argument_count() == ONE) { 4552 CreateArrayDispatchOneArgument(masm, mode); 4553 } else if (argument_count() == MORE_THAN_ONE) { 4554 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode); 4555 } else { 4556 UNREACHABLE(); 4557 } 4558} 4559 4560 4561void ArrayConstructorStub::Generate(MacroAssembler* masm) { 4562 // ----------- S t a t e ------------- 4563 // -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE) 4564 // -- ebx : AllocationSite or undefined 4565 // -- edi : constructor 4566 // -- edx : Original constructor 4567 // -- esp[0] : return address 4568 // -- esp[4] : last argument 4569 // ----------------------------------- 4570 if (FLAG_debug_code) { 4571 // The array construct code is only set for the global and natives 4572 // builtin Array functions which always have maps. 4573 4574 // Initial map for the builtin Array function should be a map. 
4575 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 4576 // Will both indicate a NULL and a Smi. 4577 __ test(ecx, Immediate(kSmiTagMask)); 4578 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); 4579 __ CmpObjectType(ecx, MAP_TYPE, ecx); 4580 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); 4581 4582 // We should either have undefined in ebx or a valid AllocationSite 4583 __ AssertUndefinedOrAllocationSite(ebx); 4584 } 4585 4586 Label subclassing; 4587 4588 // Enter the context of the Array function. 4589 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 4590 4591 __ cmp(edx, edi); 4592 __ j(not_equal, &subclassing); 4593 4594 Label no_info; 4595 // If the feedback vector is the undefined value call an array constructor 4596 // that doesn't use AllocationSites. 4597 __ cmp(ebx, isolate()->factory()->undefined_value()); 4598 __ j(equal, &no_info); 4599 4600 // Only look at the lower 16 bits of the transition info. 4601 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); 4602 __ SmiUntag(edx); 4603 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4604 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); 4605 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 4606 4607 __ bind(&no_info); 4608 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 4609 4610 // Subclassing. 
4611 __ bind(&subclassing); 4612 switch (argument_count()) { 4613 case ANY: 4614 case MORE_THAN_ONE: 4615 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi); 4616 __ add(eax, Immediate(3)); 4617 break; 4618 case NONE: 4619 __ mov(Operand(esp, 1 * kPointerSize), edi); 4620 __ mov(eax, Immediate(3)); 4621 break; 4622 case ONE: 4623 __ mov(Operand(esp, 2 * kPointerSize), edi); 4624 __ mov(eax, Immediate(4)); 4625 break; 4626 } 4627 __ PopReturnAddressTo(ecx); 4628 __ Push(edx); 4629 __ Push(ebx); 4630 __ PushReturnAddressFrom(ecx); 4631 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); 4632} 4633 4634 4635void InternalArrayConstructorStub::GenerateCase( 4636 MacroAssembler* masm, ElementsKind kind) { 4637 Label not_zero_case, not_one_case; 4638 Label normal_sequence; 4639 4640 __ test(eax, eax); 4641 __ j(not_zero, ¬_zero_case); 4642 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); 4643 __ TailCallStub(&stub0); 4644 4645 __ bind(¬_zero_case); 4646 __ cmp(eax, 1); 4647 __ j(greater, ¬_one_case); 4648 4649 if (IsFastPackedElementsKind(kind)) { 4650 // We might need to create a holey array 4651 // look at the first argument 4652 __ mov(ecx, Operand(esp, kPointerSize)); 4653 __ test(ecx, ecx); 4654 __ j(zero, &normal_sequence); 4655 4656 InternalArraySingleArgumentConstructorStub 4657 stub1_holey(isolate(), GetHoleyElementsKind(kind)); 4658 __ TailCallStub(&stub1_holey); 4659 } 4660 4661 __ bind(&normal_sequence); 4662 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); 4663 __ TailCallStub(&stub1); 4664 4665 __ bind(¬_one_case); 4666 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind); 4667 __ TailCallStub(&stubN); 4668} 4669 4670 4671void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 4672 // ----------- S t a t e ------------- 4673 // -- eax : argc 4674 // -- edi : constructor 4675 // -- esp[0] : return address 4676 // -- esp[4] : last argument 4677 // 
----------------------------------- 4678 4679 if (FLAG_debug_code) { 4680 // The array construct code is only set for the global and natives 4681 // builtin Array functions which always have maps. 4682 4683 // Initial map for the builtin Array function should be a map. 4684 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 4685 // Will both indicate a NULL and a Smi. 4686 __ test(ecx, Immediate(kSmiTagMask)); 4687 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); 4688 __ CmpObjectType(ecx, MAP_TYPE, ecx); 4689 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); 4690 } 4691 4692 // Figure out the right elements kind 4693 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 4694 4695 // Load the map's "bit field 2" into |result|. We only need the first byte, 4696 // but the following masking takes care of that anyway. 4697 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); 4698 // Retrieve elements_kind from bit field 2. 4699 __ DecodeField<Map::ElementsKindBits>(ecx); 4700 4701 if (FLAG_debug_code) { 4702 Label done; 4703 __ cmp(ecx, Immediate(FAST_ELEMENTS)); 4704 __ j(equal, &done); 4705 __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS)); 4706 __ Assert(equal, 4707 kInvalidElementsKindForInternalArrayOrInternalPackedArray); 4708 __ bind(&done); 4709 } 4710 4711 Label fast_elements_case; 4712 __ cmp(ecx, Immediate(FAST_ELEMENTS)); 4713 __ j(equal, &fast_elements_case); 4714 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 4715 4716 __ bind(&fast_elements_case); 4717 GenerateCase(masm, FAST_ELEMENTS); 4718} 4719 4720 4721void FastNewObjectStub::Generate(MacroAssembler* masm) { 4722 // ----------- S t a t e ------------- 4723 // -- edi : target 4724 // -- edx : new target 4725 // -- esi : context 4726 // -- esp[0] : return address 4727 // ----------------------------------- 4728 __ AssertFunction(edi); 4729 __ AssertReceiver(edx); 4730 4731 // Verify that the new target is a JSFunction. 
4732 Label new_object; 4733 __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx); 4734 __ j(not_equal, &new_object); 4735 4736 // Load the initial map and verify that it's in fact a map. 4737 __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset)); 4738 __ JumpIfSmi(ecx, &new_object); 4739 __ CmpObjectType(ecx, MAP_TYPE, ebx); 4740 __ j(not_equal, &new_object); 4741 4742 // Fall back to runtime if the target differs from the new target's 4743 // initial map constructor. 4744 __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset)); 4745 __ j(not_equal, &new_object); 4746 4747 // Allocate the JSObject on the heap. 4748 Label allocate, done_allocate; 4749 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset)); 4750 __ lea(ebx, Operand(ebx, times_pointer_size, 0)); 4751 __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS); 4752 __ bind(&done_allocate); 4753 4754 // Initialize the JSObject fields. 4755 __ mov(Operand(eax, JSObject::kMapOffset), ecx); 4756 __ mov(Operand(eax, JSObject::kPropertiesOffset), 4757 masm->isolate()->factory()->empty_fixed_array()); 4758 __ mov(Operand(eax, JSObject::kElementsOffset), 4759 masm->isolate()->factory()->empty_fixed_array()); 4760 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize); 4761 __ lea(ebx, Operand(eax, JSObject::kHeaderSize)); 4762 4763 // ----------- S t a t e ------------- 4764 // -- eax : result (untagged) 4765 // -- ebx : result fields (untagged) 4766 // -- edi : result end (untagged) 4767 // -- ecx : initial map 4768 // -- esi : context 4769 // -- esp[0] : return address 4770 // ----------------------------------- 4771 4772 // Perform in-object slack tracking if requested. 4773 Label slack_tracking; 4774 STATIC_ASSERT(Map::kNoSlackTracking == 0); 4775 __ test(FieldOperand(ecx, Map::kBitField3Offset), 4776 Immediate(Map::ConstructionCounter::kMask)); 4777 __ j(not_zero, &slack_tracking, Label::kNear); 4778 { 4779 // Initialize all in-object fields with undefined. 
4780 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex); 4781 __ InitializeFieldsWithFiller(ebx, edi, edx); 4782 4783 // Add the object tag to make the JSObject real. 4784 STATIC_ASSERT(kHeapObjectTag == 1); 4785 __ inc(eax); 4786 __ Ret(); 4787 } 4788 __ bind(&slack_tracking); 4789 { 4790 // Decrease generous allocation count. 4791 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32); 4792 __ sub(FieldOperand(ecx, Map::kBitField3Offset), 4793 Immediate(1 << Map::ConstructionCounter::kShift)); 4794 4795 // Initialize the in-object fields with undefined. 4796 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset)); 4797 __ neg(edx); 4798 __ lea(edx, Operand(edi, edx, times_pointer_size, 0)); 4799 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex); 4800 __ InitializeFieldsWithFiller(ebx, edx, edi); 4801 4802 // Initialize the remaining (reserved) fields with one pointer filler map. 4803 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset)); 4804 __ lea(edx, Operand(ebx, edx, times_pointer_size, 0)); 4805 __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex); 4806 __ InitializeFieldsWithFiller(ebx, edx, edi); 4807 4808 // Add the object tag to make the JSObject real. 4809 STATIC_ASSERT(kHeapObjectTag == 1); 4810 __ inc(eax); 4811 4812 // Check if we can finalize the instance size. 4813 Label finalize; 4814 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1); 4815 __ test(FieldOperand(ecx, Map::kBitField3Offset), 4816 Immediate(Map::ConstructionCounter::kMask)); 4817 __ j(zero, &finalize, Label::kNear); 4818 __ Ret(); 4819 4820 // Finalize the instance size. 4821 __ bind(&finalize); 4822 { 4823 FrameScope scope(masm, StackFrame::INTERNAL); 4824 __ Push(eax); 4825 __ Push(ecx); 4826 __ CallRuntime(Runtime::kFinalizeInstanceSize); 4827 __ Pop(eax); 4828 } 4829 __ Ret(); 4830 } 4831 4832 // Fall back to %AllocateInNewSpace. 
4833 __ bind(&allocate); 4834 { 4835 FrameScope scope(masm, StackFrame::INTERNAL); 4836 __ SmiTag(ebx); 4837 __ Push(ecx); 4838 __ Push(ebx); 4839 __ CallRuntime(Runtime::kAllocateInNewSpace); 4840 __ Pop(ecx); 4841 } 4842 STATIC_ASSERT(kHeapObjectTag == 1); 4843 __ dec(eax); 4844 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset)); 4845 __ lea(edi, Operand(eax, ebx, times_pointer_size, 0)); 4846 __ jmp(&done_allocate); 4847 4848 // Fall back to %NewObject. 4849 __ bind(&new_object); 4850 __ PopReturnAddressTo(ecx); 4851 __ Push(edi); 4852 __ Push(edx); 4853 __ PushReturnAddressFrom(ecx); 4854 __ TailCallRuntime(Runtime::kNewObject); 4855} 4856 4857 4858void FastNewRestParameterStub::Generate(MacroAssembler* masm) { 4859 // ----------- S t a t e ------------- 4860 // -- edi : function 4861 // -- esi : context 4862 // -- ebp : frame pointer 4863 // -- esp[0] : return address 4864 // ----------------------------------- 4865 __ AssertFunction(edi); 4866 4867 // For Ignition we need to skip all possible handler/stub frames until 4868 // we reach the JavaScript frame for the function (similar to what the 4869 // runtime fallback implementation does). So make edx point to that 4870 // JavaScript frame. 4871 { 4872 Label loop, loop_entry; 4873 __ mov(edx, ebp); 4874 __ jmp(&loop_entry, Label::kNear); 4875 __ bind(&loop); 4876 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); 4877 __ bind(&loop_entry); 4878 __ cmp(edi, Operand(edx, StandardFrameConstants::kMarkerOffset)); 4879 __ j(not_equal, &loop); 4880 } 4881 4882 // Check if we have rest parameters (only possible if we have an 4883 // arguments adaptor frame below the function frame). 
4884 Label no_rest_parameters; 4885 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); 4886 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset), 4887 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 4888 __ j(not_equal, &no_rest_parameters, Label::kNear); 4889 4890 // Check if the arguments adaptor frame contains more arguments than 4891 // specified by the function's internal formal parameter count. 4892 Label rest_parameters; 4893 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 4894 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 4895 __ sub(eax, 4896 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset)); 4897 __ j(greater, &rest_parameters); 4898 4899 // Return an empty rest parameter array. 4900 __ bind(&no_rest_parameters); 4901 { 4902 // ----------- S t a t e ------------- 4903 // -- esi : context 4904 // -- esp[0] : return address 4905 // ----------------------------------- 4906 4907 // Allocate an empty rest parameter array. 4908 Label allocate, done_allocate; 4909 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, TAG_OBJECT); 4910 __ bind(&done_allocate); 4911 4912 // Setup the rest parameter array in rax. 4913 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx); 4914 __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx); 4915 __ mov(ecx, isolate()->factory()->empty_fixed_array()); 4916 __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx); 4917 __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx); 4918 __ mov(FieldOperand(eax, JSArray::kLengthOffset), 4919 Immediate(Smi::FromInt(0))); 4920 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); 4921 __ Ret(); 4922 4923 // Fall back to %AllocateInNewSpace. 
4924 __ bind(&allocate); 4925 { 4926 FrameScope scope(masm, StackFrame::INTERNAL); 4927 __ Push(Smi::FromInt(JSArray::kSize)); 4928 __ CallRuntime(Runtime::kAllocateInNewSpace); 4929 } 4930 __ jmp(&done_allocate); 4931 } 4932 4933 __ bind(&rest_parameters); 4934 { 4935 // Compute the pointer to the first rest parameter (skippping the receiver). 4936 __ lea(ebx, 4937 Operand(ebx, eax, times_half_pointer_size, 4938 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize)); 4939 4940 // ----------- S t a t e ------------- 4941 // -- esi : context 4942 // -- eax : number of rest parameters (tagged) 4943 // -- ebx : pointer to first rest parameters 4944 // -- esp[0] : return address 4945 // ----------------------------------- 4946 4947 // Allocate space for the rest parameter array plus the backing store. 4948 Label allocate, done_allocate; 4949 __ lea(ecx, Operand(eax, times_half_pointer_size, 4950 JSArray::kSize + FixedArray::kHeaderSize)); 4951 __ Allocate(ecx, edx, edi, no_reg, &allocate, TAG_OBJECT); 4952 __ bind(&done_allocate); 4953 4954 // Setup the elements array in edx. 4955 __ mov(FieldOperand(edx, FixedArray::kMapOffset), 4956 isolate()->factory()->fixed_array_map()); 4957 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax); 4958 { 4959 Label loop, done_loop; 4960 __ Move(ecx, Smi::FromInt(0)); 4961 __ bind(&loop); 4962 __ cmp(ecx, eax); 4963 __ j(equal, &done_loop, Label::kNear); 4964 __ mov(edi, Operand(ebx, 0 * kPointerSize)); 4965 __ mov(FieldOperand(edx, ecx, times_half_pointer_size, 4966 FixedArray::kHeaderSize), 4967 edi); 4968 __ sub(ebx, Immediate(1 * kPointerSize)); 4969 __ add(ecx, Immediate(Smi::FromInt(1))); 4970 __ jmp(&loop); 4971 __ bind(&done_loop); 4972 } 4973 4974 // Setup the rest parameter array in edi. 
4975 __ lea(edi, 4976 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize)); 4977 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx); 4978 __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx); 4979 __ mov(FieldOperand(edi, JSArray::kPropertiesOffset), 4980 isolate()->factory()->empty_fixed_array()); 4981 __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx); 4982 __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax); 4983 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); 4984 __ mov(eax, edi); 4985 __ Ret(); 4986 4987 // Fall back to %AllocateInNewSpace. 4988 __ bind(&allocate); 4989 { 4990 FrameScope scope(masm, StackFrame::INTERNAL); 4991 __ SmiTag(ecx); 4992 __ Push(eax); 4993 __ Push(ebx); 4994 __ Push(ecx); 4995 __ CallRuntime(Runtime::kAllocateInNewSpace); 4996 __ mov(edx, eax); 4997 __ Pop(ebx); 4998 __ Pop(eax); 4999 } 5000 __ jmp(&done_allocate); 5001 } 5002} 5003 5004 5005void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) { 5006 // ----------- S t a t e ------------- 5007 // -- edi : function 5008 // -- esi : context 5009 // -- ebp : frame pointer 5010 // -- esp[0] : return address 5011 // ----------------------------------- 5012 __ AssertFunction(edi); 5013 5014 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub. 5015 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 5016 __ mov(ecx, 5017 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset)); 5018 __ lea(edx, Operand(ebp, ecx, times_half_pointer_size, 5019 StandardFrameConstants::kCallerSPOffset)); 5020 5021 // ecx : number of parameters (tagged) 5022 // edx : parameters pointer 5023 // edi : function 5024 // esp[0] : return address 5025 5026 // Check if the calling frame is an arguments adaptor frame. 
5027 Label adaptor_frame, try_allocate, runtime; 5028 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 5029 __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset)); 5030 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 5031 __ j(equal, &adaptor_frame, Label::kNear); 5032 5033 // No adaptor, parameter count = argument count. 5034 __ mov(ebx, ecx); 5035 __ push(ecx); 5036 __ jmp(&try_allocate, Label::kNear); 5037 5038 // We have an adaptor frame. Patch the parameters pointer. 5039 __ bind(&adaptor_frame); 5040 __ mov(ebx, ecx); 5041 __ push(ecx); 5042 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 5043 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 5044 __ lea(edx, Operand(edx, ecx, times_2, 5045 StandardFrameConstants::kCallerSPOffset)); 5046 5047 // ebx = parameter count (tagged) 5048 // ecx = argument count (smi-tagged) 5049 // Compute the mapped parameter count = min(ebx, ecx) in ebx. 5050 __ cmp(ebx, ecx); 5051 __ j(less_equal, &try_allocate, Label::kNear); 5052 __ mov(ebx, ecx); 5053 5054 // Save mapped parameter count and function. 5055 __ bind(&try_allocate); 5056 __ push(edi); 5057 __ push(ebx); 5058 5059 // Compute the sizes of backing store, parameter map, and arguments object. 5060 // 1. Parameter map, has 2 extra words containing context and backing store. 5061 const int kParameterMapHeaderSize = 5062 FixedArray::kHeaderSize + 2 * kPointerSize; 5063 Label no_parameter_map; 5064 __ test(ebx, ebx); 5065 __ j(zero, &no_parameter_map, Label::kNear); 5066 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize)); 5067 __ bind(&no_parameter_map); 5068 5069 // 2. Backing store. 5070 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize)); 5071 5072 // 3. Arguments object. 5073 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize)); 5074 5075 // Do the allocation of all three objects in one go. 
5076 __ Allocate(ebx, eax, edi, no_reg, &runtime, TAG_OBJECT); 5077 5078 // eax = address of new object(s) (tagged) 5079 // ecx = argument count (smi-tagged) 5080 // esp[0] = mapped parameter count (tagged) 5081 // esp[4] = function 5082 // esp[8] = parameter count (tagged) 5083 // Get the arguments map from the current native context into edi. 5084 Label has_mapped_parameters, instantiate; 5085 __ mov(edi, NativeContextOperand()); 5086 __ mov(ebx, Operand(esp, 0 * kPointerSize)); 5087 __ test(ebx, ebx); 5088 __ j(not_zero, &has_mapped_parameters, Label::kNear); 5089 __ mov( 5090 edi, 5091 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX))); 5092 __ jmp(&instantiate, Label::kNear); 5093 5094 __ bind(&has_mapped_parameters); 5095 __ mov(edi, Operand(edi, Context::SlotOffset( 5096 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX))); 5097 __ bind(&instantiate); 5098 5099 // eax = address of new object (tagged) 5100 // ebx = mapped parameter count (tagged) 5101 // ecx = argument count (smi-tagged) 5102 // edi = address of arguments map (tagged) 5103 // esp[0] = mapped parameter count (tagged) 5104 // esp[4] = function 5105 // esp[8] = parameter count (tagged) 5106 // Copy the JS object part. 5107 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi); 5108 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), 5109 masm->isolate()->factory()->empty_fixed_array()); 5110 __ mov(FieldOperand(eax, JSObject::kElementsOffset), 5111 masm->isolate()->factory()->empty_fixed_array()); 5112 5113 // Set up the callee in-object property. 5114 STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1); 5115 __ mov(edi, Operand(esp, 1 * kPointerSize)); 5116 __ AssertNotSmi(edi); 5117 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi); 5118 5119 // Use the length (smi tagged) and set that as an in-object property too. 
5120 __ AssertSmi(ecx); 5121 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx); 5122 5123 // Set up the elements pointer in the allocated arguments object. 5124 // If we allocated a parameter map, edi will point there, otherwise to the 5125 // backing store. 5126 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize)); 5127 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 5128 5129 // eax = address of new object (tagged) 5130 // ebx = mapped parameter count (tagged) 5131 // ecx = argument count (tagged) 5132 // edx = address of receiver argument 5133 // edi = address of parameter map or backing store (tagged) 5134 // esp[0] = mapped parameter count (tagged) 5135 // esp[4] = function 5136 // esp[8] = parameter count (tagged) 5137 // Free two registers. 5138 __ push(edx); 5139 __ push(eax); 5140 5141 // Initialize parameter map. If there are no mapped arguments, we're done. 5142 Label skip_parameter_map; 5143 __ test(ebx, ebx); 5144 __ j(zero, &skip_parameter_map); 5145 5146 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 5147 Immediate(isolate()->factory()->sloppy_arguments_elements_map())); 5148 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); 5149 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); 5150 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); 5151 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); 5152 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); 5153 5154 // Copy the parameter slots and the holes in the arguments. 5155 // We need to fill in mapped_parameter_count slots. They index the context, 5156 // where parameters are stored in reverse order, at 5157 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 5158 // The mapped parameter thus need to get indices 5159 // MIN_CONTEXT_SLOTS+parameter_count-1 .. 5160 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count 5161 // We loop from right to left. 
5162 Label parameters_loop, parameters_test; 5163 __ push(ecx); 5164 __ mov(eax, Operand(esp, 3 * kPointerSize)); 5165 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); 5166 __ add(ebx, Operand(esp, 5 * kPointerSize)); 5167 __ sub(ebx, eax); 5168 __ mov(ecx, isolate()->factory()->the_hole_value()); 5169 __ mov(edx, edi); 5170 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); 5171 // eax = loop variable (tagged) 5172 // ebx = mapping index (tagged) 5173 // ecx = the hole value 5174 // edx = address of parameter map (tagged) 5175 // edi = address of backing store (tagged) 5176 // esp[0] = argument count (tagged) 5177 // esp[4] = address of new object (tagged) 5178 // esp[8] = address of receiver argument 5179 // esp[12] = mapped parameter count (tagged) 5180 // esp[16] = function 5181 // esp[20] = parameter count (tagged) 5182 __ jmp(¶meters_test, Label::kNear); 5183 5184 __ bind(¶meters_loop); 5185 __ sub(eax, Immediate(Smi::FromInt(1))); 5186 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx); 5187 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx); 5188 __ add(ebx, Immediate(Smi::FromInt(1))); 5189 __ bind(¶meters_test); 5190 __ test(eax, eax); 5191 __ j(not_zero, ¶meters_loop, Label::kNear); 5192 __ pop(ecx); 5193 5194 __ bind(&skip_parameter_map); 5195 5196 // ecx = argument count (tagged) 5197 // edi = address of backing store (tagged) 5198 // esp[0] = address of new object (tagged) 5199 // esp[4] = address of receiver argument 5200 // esp[8] = mapped parameter count (tagged) 5201 // esp[12] = function 5202 // esp[16] = parameter count (tagged) 5203 // Copy arguments header and remaining slots (if there are any). 
5204 __ mov(FieldOperand(edi, FixedArray::kMapOffset), 5205 Immediate(isolate()->factory()->fixed_array_map())); 5206 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); 5207 5208 Label arguments_loop, arguments_test; 5209 __ mov(ebx, Operand(esp, 2 * kPointerSize)); 5210 __ mov(edx, Operand(esp, 1 * kPointerSize)); 5211 __ sub(edx, ebx); // Is there a smarter way to do negative scaling? 5212 __ sub(edx, ebx); 5213 __ jmp(&arguments_test, Label::kNear); 5214 5215 __ bind(&arguments_loop); 5216 __ sub(edx, Immediate(kPointerSize)); 5217 __ mov(eax, Operand(edx, 0)); 5218 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax); 5219 __ add(ebx, Immediate(Smi::FromInt(1))); 5220 5221 __ bind(&arguments_test); 5222 __ cmp(ebx, ecx); 5223 __ j(less, &arguments_loop, Label::kNear); 5224 5225 // Restore. 5226 __ pop(eax); // Address of arguments object. 5227 __ Drop(4); 5228 5229 // Return. 5230 __ ret(0); 5231 5232 // Do the runtime call to allocate the arguments object. 5233 __ bind(&runtime); 5234 __ pop(eax); // Remove saved mapped parameter count. 5235 __ pop(edi); // Pop saved function. 5236 __ pop(eax); // Remove saved parameter count. 5237 __ pop(eax); // Pop return address. 5238 __ push(edi); // Push function. 5239 __ push(edx); // Push parameters pointer. 5240 __ push(ecx); // Push parameter count. 5241 __ push(eax); // Push return address. 5242 __ TailCallRuntime(Runtime::kNewSloppyArguments); 5243} 5244 5245 5246void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { 5247 // ----------- S t a t e ------------- 5248 // -- edi : function 5249 // -- esi : context 5250 // -- ebp : frame pointer 5251 // -- esp[0] : return address 5252 // ----------------------------------- 5253 __ AssertFunction(edi); 5254 5255 // For Ignition we need to skip all possible handler/stub frames until 5256 // we reach the JavaScript frame for the function (similar to what the 5257 // runtime fallback implementation does). 
  // So make edx point to that
  // JavaScript frame.
  {
    Label loop, loop_entry;
    __ mov(edx, ebp);
    __ jmp(&loop_entry, Label::kNear);
    __ bind(&loop);
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
    __ bind(&loop_entry);
    // The frame whose marker slot holds this function is the one we want.
    __ cmp(edi, Operand(edx, StandardFrameConstants::kMarkerOffset));
    __ j(not_equal, &loop);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: the argument count is the formal parameter count
    // from the SharedFunctionInfo (a smi, hence times_half_pointer_size).
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(eax,
           FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
    __ lea(ebx,
           Operand(edx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Adaptor frame: take the actual (adapted) argument count from it.
    __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
  }
  __ bind(&arguments_done);

  // ----------- S t a t e -------------
  //  -- eax    : number of arguments (tagged)
  //  -- ebx    : pointer to the first argument
  //  -- esi    : context
  //  -- esp[0] : return address
  // -----------------------------------

  // Allocate space for the strict arguments object plus the backing store.
  Label allocate, done_allocate;
  __ lea(ecx,
         Operand(eax, times_half_pointer_size,
                 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
  __ Allocate(ecx, edx, edi, no_reg, &allocate, TAG_OBJECT);
  __ bind(&done_allocate);

  // Setup the elements array in edx.
  __ mov(FieldOperand(edx, FixedArray::kMapOffset),
         isolate()->factory()->fixed_array_map());
  __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
  {
    // Copy the arguments into the elements array; ebx walks down the stack
    // while the smi index ecx walks up the array.
    Label loop, done_loop;
    __ Move(ecx, Smi::FromInt(0));
    __ bind(&loop);
    __ cmp(ecx, eax);
    __ j(equal, &done_loop, Label::kNear);
    __ mov(edi, Operand(ebx, 0 * kPointerSize));
    __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                        FixedArray::kHeaderSize),
           edi);
    __ sub(ebx, Immediate(1 * kPointerSize));
    __ add(ecx, Immediate(Smi::FromInt(1)));
    __ jmp(&loop);
    __ bind(&done_loop);
  }

  // Setup the rest parameter array in edi.
  __ lea(edi,
         Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
  __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
  __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
  STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
  __ mov(eax, edi);
  __ Ret();

  // Fall back to %AllocateInNewSpace. eax (length) and ebx (argument
  // pointer) are preserved across the call; the smi-tagged size in ecx is
  // the runtime argument.
  __ bind(&allocate);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(ecx);
    __ Push(eax);
    __ Push(ebx);
    __ Push(ecx);
    __ CallRuntime(Runtime::kAllocateInNewSpace);
    __ mov(edx, eax);
    __ Pop(ebx);
    __ Pop(eax);
  }
  __ jmp(&done_allocate);
}


void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  // Loads a global through a PropertyCell reached via the script context
  // chain; falls back to the runtime if the cell holds the_hole.
  Register context_reg = esi;
  Register slot_reg = ebx;
  Register result_reg = eax;
  Label slow_case;

  // Go up context chain to the script context. depth() is a compile-time
  // property of the stub, so this unrolls to `depth` loads.
  for (int i = 0; i < depth(); ++i) {
    __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = result_reg;
  }

  // Load the PropertyCell value at the specified slot.
  __ mov(result_reg, ContextOperand(context_reg, slot_reg));
  __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ SmiTag(slot_reg);
  __ Pop(result_reg);   // Pop return address.
  __ Push(slot_reg);
  __ Push(result_reg);  // Push return address.
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}


void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  // Stores a global through a PropertyCell reached via the script context
  // chain, with fast paths for mutable and constant-type cells.
  Register context_reg = esi;
  Register slot_reg = ebx;
  Register value_reg = eax;
  Register cell_reg = edi;
  Register cell_details_reg = edx;
  Register cell_value_reg = ecx;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    // the_hole is never stored via this stub (see the fast paths below).
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = cell_reg;
  }

  // Load the PropertyCell at the specified slot.
  __ mov(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type and kind).
  __ mov(cell_details_reg,
         FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ SmiUntag(cell_details_reg);
  __ and_(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kMutable) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ test(cell_details_reg,
          Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstant) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kConstantType) |
                     PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmp(cell_details_reg,
           Immediate(PropertyDetails::PropertyCellTypeField::encode(
                         PropertyCellType::kUndefined) |
                     PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  // Storing the same value: nothing to do.
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY). The masked details include the read-only bit, so a READ_ONLY
  // constant-type cell fails this compare and falls through to the runtime.
  __ cmp(cell_details_reg,
         Immediate(PropertyDetails::PropertyCellTypeField::encode(
                       PropertyCellType::kConstantType) |
                   PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ mov(cell_value_map_reg,
         FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ SmiTag(slot_reg);
  __ Pop(cell_reg);   // Pop return address.
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(cell_reg);  // Push return address.
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy);
}


// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}


// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
  if (__ emit_debug_code()) {
    // Zap esi so debug builds catch accidental use of the saved context.
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}


// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Isolate* isolate = masm->isolate();

  // External locations of the isolate's current HandleScope bookkeeping.
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx.is(function_address));
  // Allocate HandleScope in callee-save registers:
  // ebx = next, edi = limit; bump the nesting level.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    // Notify the profiler that we are entering external (C++) code.
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }


  // If the CPU profiler is active, call through the thunk (which receives
  // the real callback address as its last argument); otherwise call the
  // callback directly.
  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), 0);
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    // Notify the profiler that we are back in generated code.
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  // The Assert consumes the flags set by the sub above: the level must not
  // have gone negative.
  __ Assert(above_equal, kInvalidHandleScopeLevel);
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ mov(esi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    // Dynamic stack space: load the byte count to drop into ebx.
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    // Variable stack unwind: pop the return address, drop ebx bytes of
    // arguments, then jump back.
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  // edi temporarily preserves the result (eax) across the C call.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

// Builds the FunctionCallbackArguments frame on the stack and dispatches to
// CallApiFunctionAndReturn. Shared by CallApiFunctionStub (argc in eax) and
// CallApiAccessorStub (argc known at compile time).
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined, bool is_lazy) {
  // ----------- S t a t e -------------
  //  -- edi                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  -- eax                 : number of arguments if argc is a register
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = edi;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register context = esi;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  // The pushes below must produce exactly this layout.
  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  DCHECK(argc.is_immediate() || eax.is(argc.reg()));

  if (argc.is_immediate()) {
    __ pop(return_address);
    // context save.
    __ push(context);
  } else {
    // pop return address and save context in one swap; eax stays argc, so
    // the context register doubles as the return-address holder.
    __ xchg(context, Operand(esp, 0));
    return_address = context;
  }

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data is undefined here, so reuse it for the two slots.
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

  // scratch now points at the FCA base (holder slot).
  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  if (!is_lazy) {
    // load context from callee
    __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
  }

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  if (argc.is_immediate()) {
    __ add(scratch,
           Immediate((argc.immediate() + FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ Move(ApiParameterOperand(4), Immediate(argc.immediate()));
    // FunctionCallbackInfo::is_construct_call_.
    __ Move(ApiParameterOperand(5), Immediate(0));
  } else {
    __ lea(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                            (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ mov(ApiParameterOperand(4), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ lea(argc.reg(), Operand(argc.reg(), times_pointer_size,
                               (FCA::kArgsLength + 1) * kPointerSize));
    __ mov(ApiParameterOperand(5), argc.reg());
  }

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // +2 skips the saved ebp and return address in the exit frame.
  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first js argument
  int return_value_offset = 0;
  if (return_first_arg) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  int stack_space = 0;
  Operand is_construct_call_operand = ApiParameterOperand(5);
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    // Known argc: unwind a fixed number of slots instead of reading the
    // is_construct_call_ slot at runtime.
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // Variable-argument API call: argc arrives in eax.
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(eax), false,
                            call_data_undefined, false);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  // Accessor API call: argc is baked into the stub.
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  bool is_lazy = this->is_lazy();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined, is_lazy);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esp[0]                        : return address
  //  -- esp[4]                        : name
  //  -- esp[8 .. (8 + kArgsLength*4)] : v8::PropertyCallbackInfo::args_
  //  -- ...
  //  -- edx                           : api_function_address
  // -----------------------------------
  DCHECK(edx.is(ApiGetterDescriptor::function_address()));

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  Register api_function_address = edx;
  Register scratch = ebx;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // it's args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // The name handle sits one word below the args_ array.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);  // name.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           thunk_last_arg, kStackUnwindSpace, nullptr,
                           return_value_operand, NULL);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32