// stub-cache-ia32.cc revision b8e0da25ee8efac3bb05cd6b2730aafbd96119f4
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ic-inl.h"
#include "codegen-inl.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


// Probe one stub cache table (primary or secondary). On a hit (key matches
// 'name' and the cached code's flags match 'flags') this jumps directly to
// the cached code object and does not return; on a mismatch it falls through
// past the locally bound 'miss' label. If 'extra' is a valid register it is
// used to hold the code entry, avoiding the push/pop of 'offset' needed in
// the register-starved path.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
    __ j(not_equal, &miss, not_taken);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

    // Jump to the first instruction in the code stub.
    __ add(Operand(extra), Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(Operand(extra));

    __ bind(&miss);
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
    __ j(not_equal, &miss, not_taken);

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(Operand(offset), Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(Operand(offset));

    // Pop at miss.
    __ bind(&miss);
    __ pop(offset);
  }
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
// Clobbers r0 and r1 as scratch registers.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             String* name,
                                             Register r0,
                                             Register r1) {
  ASSERT(name->IsSymbol());
  __ IncrementCounter(&Counters::negative_lookups, 1);
  __ IncrementCounter(&Counters::negative_lookups_miss, 1);

  Label done;
  __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label, not_taken);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
  __ j(below, miss_label, not_taken);

  // Load properties array.
  Register properties = r0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(Factory::hash_table_map()));
  __ j(not_equal, miss_label);

  // Compute the capacity mask.
  const int kCapacityOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up.
  static const int kProbes = 4;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r1;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(Operand(index),
            Immediate(Smi::FromInt(name->Hash() +
                                   StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r1;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, Factory::undefined_value());
    if (i != kProbes - 1) {
      __ j(equal, &done, taken);

      // Stop if found the property.
      __ cmp(entity_name, Handle<String>(name));
      __ j(equal, miss_label, not_taken);

      // Check if the entry name is not a symbol.
      __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
      __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
                kIsSymbolMask);
      __ j(zero, miss_label, not_taken);
    } else {
      // Give up probing if still not found the undefined value.
      __ j(not_equal, miss_label, not_taken);
    }
  }

  __ bind(&done);
  __ DecrementCounter(&Counters::negative_lookups_miss, 1);
}


// Probe the stub cache for a matching (receiver map, name) entry: first the
// primary table, then the secondary table. On a hit control jumps to the
// cached stub; on a miss execution falls through to the caller's code.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Label miss;
  USE(extra2);  // The register extra2 is not used on the ia32 platform.

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Check scratch and extra registers are valid, and extra2 is unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Get the map of the receiver and compute the hash.
  __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, flags);
  __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch, extra);

  // Primary miss: Compute hash for secondary probe.
  __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, flags);
  __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(scratch, Operand(name));
  __ add(Operand(scratch), Immediate(flags));
  __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch, extra);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}


// Load the prototype of the global function at 'index' into 'prototype'
// via the function's initial map.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


// Like GenerateLoadGlobalFunctionPrototype, but embeds the initial map
// directly in the code; jumps to 'miss' if the context has changed.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  // Check we're still in the same context.
  __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
         Top::global());
  __ j(not_equal, miss);
  // Get the global function with the given index.
  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


// Load a JSArray's length into eax and return; jumps to miss_label if the
// receiver is a smi or not a JS array.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss_label, not_taken);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label, not_taken);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, smi, not_taken);

  // Check that the object is a string.
  __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  __ test(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object, not_taken);
}


// Load the length of a string (or of the string wrapped by a JSValue)
// into eax and return; jumps to 'miss' otherwise.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length from the string and convert to a smi.
  __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, JS_VALUE_TYPE);
  __ j(not_equal, miss, not_taken);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
  __ ret(0);
}


// Load the receiver function's prototype into eax and return; jumps to
// miss_label on failure.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, Operand(scratch1));
  __ ret(0);
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ mov(dst, FieldOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    __ mov(dst, FieldOperand(dst, offset));
  }
}


// Push the five arguments expected by the interceptor IC runtime entries:
// name, interceptor info, receiver, holder and the interceptor's data.
// Note: clobbers 'name' (reused as scratch after it has been pushed).
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  ASSERT(!Heap::InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ push(FieldOperand(scratch, InterceptorInfo::kDataOffset));
}


// Call the "load property with interceptor (only)" runtime entry with the
// five arguments pushed by PushInterceptorArguments.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)),
      5);
}


// Number of pointers to be reserved on stack for fast API call.
static const int kFastApiCallArguments = 3;


// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  //  -- esp[4] : last argument in the internal frame of the caller
  // -----------------------------------
  __ pop(scratch);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(Immediate(Smi::FromInt(0)));
  }
  __ push(scratch);
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- esp[0]  : return address.
  //  -- esp[4]  : last fast api call extra argument.
  //  -- ...
  //  -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
  //  -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ pop(scratch);
  __ add(Operand(esp), Immediate(kPointerSize * kFastApiCallArguments));
  __ push(scratch);
}


// Generates call to API function. Returns false (and sets *failure) if
// allocating the call stub would have triggered a GC.
static bool GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc,
                                Failure** failure) {
  // ----------- S t a t e -------------
  //  -- esp[0]              : return address
  //  -- esp[4]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- esp[8]              : api function
  //                           (first fast api call extra argument)
  //  -- esp[12]             : api call data
  //  -- esp[16]             : last argument
  //  -- ...
  //  -- esp[(argc + 3) * 4] : first argument
  //  -- esp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ mov(edi, Immediate(Handle<JSFunction>(function)));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Pass the additional arguments.
  __ mov(Operand(esp, 2 * kPointerSize), edi);
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (Heap::InNewSpace(call_data)) {
    // The call data is in new space, so it cannot be embedded in the code;
    // load it out of the (old-space) CallHandlerInfo instead.
    __ mov(ecx, api_call_info_handle);
    __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
    __ mov(Operand(esp, 3 * kPointerSize), ebx);
  } else {
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(Handle<Object>(call_data)));
  }

  // Prepare arguments.
  __ lea(eax, Operand(esp, 3 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiArgc = 1;  // API function gets reference to the v8::Arguments.

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, ebx);

  __ mov(ApiParameterOperand(1), eax);  // v8::Arguments::implicit_args_.
  __ add(Operand(eax), Immediate(argc * kPointerSize));
  __ mov(ApiParameterOperand(2), eax);  // v8::Arguments::values_.
  __ Set(ApiParameterOperand(3), Immediate(argc));  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(ApiParameterOperand(4), Immediate(0));

  // v8::InvocationCallback's argument.
  __ lea(eax, ApiParameterOperand(1));
  __ mov(ApiParameterOperand(0), eax);

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  MaybeObject* result =
      masm->TryCallApiFunctionAndReturn(&fun, argc + kFastApiCallArguments + 1);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  return true;
}


// Compiles the interceptor portion of a call IC stub: invokes the named
// interceptor and either tail-calls a cached constant function
// (CompileCacheable) or falls back to a generic runtime call (CompileRegular).
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  // Returns false (with *failure set) only when the cacheable path fails to
  // generate its fast API call.
  bool Compile(MacroAssembler* masm,
               JSObject* object,
               JSObject* holder,
               String* name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss,
               Failure** failure) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ test(receiver, Immediate(kSmiTagMask));
    __ j(zero, miss, not_taken);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss,
                              failure);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return true;
    }
  }

 private:
  // Fast path: the lookup resolved to a constant function, so after the
  // interceptor declines we can invoke the cached function directly
  // (possibly through the fast API call mechanism).
  bool CompileCacheable(MacroAssembler* masm,
                        JSObject* object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        JSObject* interceptor_holder,
                        LookupResult* lookup,
                        String* name,
                        const CallOptimization& optimization,
                        Label* miss_label,
                        Failure** failure) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(&Counters::call_const_interceptor, 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property. Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      bool success = GenerateFastApiCall(masm, optimization,
                                         arguments_.immediate(), failure);
      if (!success) {
        return false;
      }
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }

    return true;
  }

  // Slow path: invoke the interceptor through the generic runtime entry;
  // the runtime call completes the property call itself.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(
            IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  // Call the interceptor; jumps to 'interceptor_succeeded' if it produced a
  // value (i.e. eax is not the no-interceptor-result sentinel), otherwise
  // falls through.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ push(holder);  // Save the holder.
    __ push(name_);  // Save the name.

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder (popped into the receiver reg).
    __ LeaveInternalFrame();

    __ cmp(eax, Factory::no_interceptor_result_sentinel());
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};


// Jump to the miss builtin matching the given IC kind (load or keyed load).
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = Builtins::builtin(Builtins::LoadIC_Miss);
  } else {
    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ jmp(ic, RelocInfo::CODE_TARGET);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
// The value to store is expected in eax, which is also the return value.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the object isn't a smi.
  __ test(receiver_reg, Immediate(kSmiTagMask));
  __ j(zero, miss_label, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, miss_label, not_taken);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(Handle<Map>(transition)));
    __ push(eax);
    __ push(scratch);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
           Immediate(Handle<Map>(transition)));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), eax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(eax));
    __ RecordWrite(receiver_reg, offset, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch, offset), eax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(eax));
    __ RecordWrite(scratch, offset, name_reg, receiver_reg);
  }

  // Return the value (register eax).
  __ ret(0);
}


// Generate code to check that a global property cell is empty.
// Create the property cell at compilation time if no cell exists for the
// property. Returns the cell on success, or a Failure if cell allocation
// failed.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  if (Serializer::enabled()) {
    // When serializing, the cell address cannot be embedded as raw data;
    // load it through a relocatable immediate instead.
    __ mov(scratch, Immediate(Handle<Object>(cell)));
    __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
           Immediate(Factory::the_hole_value()));
  } else {
    __ cmp(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
           Immediate(Factory::the_hole_value()));
  }
  __ j(not_equal, miss, not_taken);
  return cell;
}


// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
// Returns NULL on success, or the first Failure encountered.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}


#undef __
#define __ ACCESS_MASM(masm())


// Walk the prototype chain from 'object' to 'holder', emitting map checks
// (or dictionary negative lookups for normal-mode objects) at each step, and
// jump to 'miss' if any check fails. Returns the register that holds the
// holder at the end; may record a Failure via set_failure().
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));
  // Keep track of the current object in register reg.
  Register reg = object_reg;
  JSObject* current = object;
  int depth = 0;

  if (save_at_depth == depth) {
    // Stash the object at this depth into the reserved stack slot
    // (used by the fast API call machinery).
    __ mov(Operand(esp, kPointerSize), reg);
  }

  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Immediate(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ j(not_equal, miss, not_taken);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ mov(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ mov(Operand(esp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current == holder);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
         Immediate(Handle<Map>(holder->map())));
  __ j(not_equal, miss, not_taken);

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  };

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed. We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}


void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss, not_taken);

  // Check the prototype chain.
1050 Register reg = 1051 CheckPrototypes(object, receiver, holder, 1052 scratch1, scratch2, scratch3, name, miss); 1053 1054 // Get the value from the properties. 1055 GenerateFastPropertyLoad(masm(), eax, reg, holder, index); 1056 __ ret(0); 1057} 1058 1059 1060bool StubCompiler::GenerateLoadCallback(JSObject* object, 1061 JSObject* holder, 1062 Register receiver, 1063 Register name_reg, 1064 Register scratch1, 1065 Register scratch2, 1066 Register scratch3, 1067 AccessorInfo* callback, 1068 String* name, 1069 Label* miss, 1070 Failure** failure) { 1071 // Check that the receiver isn't a smi. 1072 __ test(receiver, Immediate(kSmiTagMask)); 1073 __ j(zero, miss, not_taken); 1074 1075 // Check that the maps haven't changed. 1076 Register reg = 1077 CheckPrototypes(object, receiver, holder, scratch1, 1078 scratch2, scratch3, name, miss); 1079 1080 Handle<AccessorInfo> callback_handle(callback); 1081 1082 // Insert additional parameters into the stack frame above return address. 1083 ASSERT(!scratch3.is(reg)); 1084 __ pop(scratch3); // Get return address to place it below. 1085 1086 __ push(receiver); // receiver 1087 __ mov(scratch2, Operand(esp)); 1088 ASSERT(!scratch2.is(reg)); 1089 __ push(reg); // holder 1090 // Push data from AccessorInfo. 1091 if (Heap::InNewSpace(callback_handle->data())) { 1092 __ mov(scratch1, Immediate(callback_handle)); 1093 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); 1094 } else { 1095 __ push(Immediate(Handle<Object>(callback_handle->data()))); 1096 } 1097 1098 // Save a pointer to where we pushed the arguments pointer. 1099 // This will be passed as the const AccessorInfo& to the C++ callback. 1100 __ push(scratch2); 1101 1102 __ push(name_reg); // name 1103 __ mov(ebx, esp); // esp points to reference to name (handler). 1104 1105 __ push(scratch3); // Restore return address. 1106 1107 // Do call through the api. 
1108 Address getter_address = v8::ToCData<Address>(callback->getter()); 1109 ApiFunction fun(getter_address); 1110 1111 // 3 elements array for v8::Agruments::values_, handler for name and pointer 1112 // to the values (it considered as smi in GC). 1113 const int kStackSpace = 5; 1114 const int kApiArgc = 2; 1115 1116 __ PrepareCallApiFunction(kApiArgc, eax); 1117 __ mov(ApiParameterOperand(0), ebx); // name. 1118 __ add(Operand(ebx), Immediate(kPointerSize)); 1119 __ mov(ApiParameterOperand(1), ebx); // arguments pointer. 1120 1121 // Emitting a stub call may try to allocate (if the code is not 1122 // already generated). Do not allow the assembler to perform a 1123 // garbage collection but instead return the allocation failure 1124 // object. 1125 MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace); 1126 if (result->IsFailure()) { 1127 *failure = Failure::cast(result); 1128 return false; 1129 } 1130 1131 return true; 1132} 1133 1134 1135void StubCompiler::GenerateLoadConstant(JSObject* object, 1136 JSObject* holder, 1137 Register receiver, 1138 Register scratch1, 1139 Register scratch2, 1140 Register scratch3, 1141 Object* value, 1142 String* name, 1143 Label* miss) { 1144 // Check that the receiver isn't a smi. 1145 __ test(receiver, Immediate(kSmiTagMask)); 1146 __ j(zero, miss, not_taken); 1147 1148 // Check that the maps haven't changed. 1149 CheckPrototypes(object, receiver, holder, 1150 scratch1, scratch2, scratch3, name, miss); 1151 1152 // Return the constant value. 
1153 __ mov(eax, Handle<Object>(value)); 1154 __ ret(0); 1155} 1156 1157 1158void StubCompiler::GenerateLoadInterceptor(JSObject* object, 1159 JSObject* interceptor_holder, 1160 LookupResult* lookup, 1161 Register receiver, 1162 Register name_reg, 1163 Register scratch1, 1164 Register scratch2, 1165 Register scratch3, 1166 String* name, 1167 Label* miss) { 1168 ASSERT(interceptor_holder->HasNamedInterceptor()); 1169 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined()); 1170 1171 // Check that the receiver isn't a smi. 1172 __ test(receiver, Immediate(kSmiTagMask)); 1173 __ j(zero, miss, not_taken); 1174 1175 // So far the most popular follow ups for interceptor loads are FIELD 1176 // and CALLBACKS, so inline only them, other cases may be added 1177 // later. 1178 bool compile_followup_inline = false; 1179 if (lookup->IsProperty() && lookup->IsCacheable()) { 1180 if (lookup->type() == FIELD) { 1181 compile_followup_inline = true; 1182 } else if (lookup->type() == CALLBACKS && 1183 lookup->GetCallbackObject()->IsAccessorInfo() && 1184 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) { 1185 compile_followup_inline = true; 1186 } 1187 } 1188 1189 if (compile_followup_inline) { 1190 // Compile the interceptor call, followed by inline code to load the 1191 // property from further up the prototype chain if the call fails. 1192 // Check that the maps haven't changed. 1193 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder, 1194 scratch1, scratch2, scratch3, 1195 name, miss); 1196 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1)); 1197 1198 // Save necessary data before invoking an interceptor. 1199 // Requires a frame to make GC aware of pushed pointers. 1200 __ EnterInternalFrame(); 1201 1202 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) { 1203 // CALLBACKS case needs a receiver to be passed into C++ callback. 
1204 __ push(receiver); 1205 } 1206 __ push(holder_reg); 1207 __ push(name_reg); 1208 1209 // Invoke an interceptor. Note: map checks from receiver to 1210 // interceptor's holder has been compiled before (see a caller 1211 // of this method.) 1212 CompileCallLoadPropertyWithInterceptor(masm(), 1213 receiver, 1214 holder_reg, 1215 name_reg, 1216 interceptor_holder); 1217 1218 // Check if interceptor provided a value for property. If it's 1219 // the case, return immediately. 1220 Label interceptor_failed; 1221 __ cmp(eax, Factory::no_interceptor_result_sentinel()); 1222 __ j(equal, &interceptor_failed); 1223 __ LeaveInternalFrame(); 1224 __ ret(0); 1225 1226 __ bind(&interceptor_failed); 1227 __ pop(name_reg); 1228 __ pop(holder_reg); 1229 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) { 1230 __ pop(receiver); 1231 } 1232 1233 __ LeaveInternalFrame(); 1234 1235 // Check that the maps from interceptor's holder to lookup's holder 1236 // haven't changed. And load lookup's holder into holder_reg. 1237 if (interceptor_holder != lookup->holder()) { 1238 holder_reg = CheckPrototypes(interceptor_holder, 1239 holder_reg, 1240 lookup->holder(), 1241 scratch1, 1242 scratch2, 1243 scratch3, 1244 name, 1245 miss); 1246 } 1247 1248 if (lookup->type() == FIELD) { 1249 // We found FIELD property in prototype chain of interceptor's holder. 1250 // Retrieve a field from field's holder. 1251 GenerateFastPropertyLoad(masm(), eax, holder_reg, 1252 lookup->holder(), lookup->GetFieldIndex()); 1253 __ ret(0); 1254 } else { 1255 // We found CALLBACKS property in prototype chain of interceptor's 1256 // holder. 1257 ASSERT(lookup->type() == CALLBACKS); 1258 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo()); 1259 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject()); 1260 ASSERT(callback != NULL); 1261 ASSERT(callback->getter() != NULL); 1262 1263 // Tail call to runtime. 
1264 // Important invariant in CALLBACKS case: the code above must be 1265 // structured to never clobber |receiver| register. 1266 __ pop(scratch2); // return address 1267 __ push(receiver); 1268 __ push(holder_reg); 1269 __ mov(holder_reg, Immediate(Handle<AccessorInfo>(callback))); 1270 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset)); 1271 __ push(holder_reg); 1272 __ push(name_reg); 1273 __ push(scratch2); // restore return address 1274 1275 ExternalReference ref = 1276 ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); 1277 __ TailCallExternalReference(ref, 5, 1); 1278 } 1279 } else { // !compile_followup_inline 1280 // Call the runtime system to load the interceptor. 1281 // Check that the maps haven't changed. 1282 Register holder_reg = 1283 CheckPrototypes(object, receiver, interceptor_holder, 1284 scratch1, scratch2, scratch3, name, miss); 1285 __ pop(scratch2); // save old return address 1286 PushInterceptorArguments(masm(), receiver, holder_reg, 1287 name_reg, interceptor_holder); 1288 __ push(scratch2); // restore old return address 1289 1290 ExternalReference ref = ExternalReference( 1291 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); 1292 __ TailCallExternalReference(ref, 5, 1); 1293 } 1294} 1295 1296 1297void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1298 if (kind_ == Code::KEYED_CALL_IC) { 1299 __ cmp(Operand(ecx), Immediate(Handle<String>(name))); 1300 __ j(not_equal, miss, not_taken); 1301 } 1302} 1303 1304 1305void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1306 JSObject* holder, 1307 String* name, 1308 Label* miss) { 1309 ASSERT(holder->IsGlobalObject()); 1310 1311 // Get the number of arguments. 1312 const int argc = arguments().immediate(); 1313 1314 // Get the receiver from the stack. 1315 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1316 1317 // If the object is the holder then we know that it's a global 1318 // object which can only happen for contextual calls. 
In this case, 1319 // the receiver cannot be a smi. 1320 if (object != holder) { 1321 __ test(edx, Immediate(kSmiTagMask)); 1322 __ j(zero, miss, not_taken); 1323 } 1324 1325 // Check that the maps haven't changed. 1326 CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss); 1327} 1328 1329 1330void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, 1331 JSFunction* function, 1332 Label* miss) { 1333 // Get the value from the cell. 1334 if (Serializer::enabled()) { 1335 __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell))); 1336 __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset)); 1337 } else { 1338 __ mov(edi, Operand::Cell(Handle<JSGlobalPropertyCell>(cell))); 1339 } 1340 1341 // Check that the cell contains the same function. 1342 if (Heap::InNewSpace(function)) { 1343 // We can't embed a pointer to a function in new space so we have 1344 // to verify that the shared function info is unchanged. This has 1345 // the nice side effect that multiple closures based on the same 1346 // function can all use this call IC. Before we load through the 1347 // function, we have to verify that it still is a function. 1348 __ test(edi, Immediate(kSmiTagMask)); 1349 __ j(zero, miss, not_taken); 1350 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx); 1351 __ j(not_equal, miss, not_taken); 1352 1353 // Check the shared function info. Make sure it hasn't changed. 
1354 __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset), 1355 Immediate(Handle<SharedFunctionInfo>(function->shared()))); 1356 __ j(not_equal, miss, not_taken); 1357 } else { 1358 __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function))); 1359 __ j(not_equal, miss, not_taken); 1360 } 1361} 1362 1363 1364MaybeObject* CallStubCompiler::GenerateMissBranch() { 1365 MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(), 1366 kind_); 1367 Object* obj; 1368 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1369 __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); 1370 return obj; 1371} 1372 1373 1374MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField( 1375 JSObject* object, 1376 JSObject* holder, 1377 int index, 1378 String* name) { 1379 // ----------- S t a t e ------------- 1380 // -- ecx : name 1381 // -- esp[0] : return address 1382 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1383 // -- ... 1384 // -- esp[(argc + 1) * 4] : receiver 1385 // ----------------------------------- 1386 Label miss; 1387 1388 GenerateNameCheck(name, &miss); 1389 1390 // Get the receiver from the stack. 1391 const int argc = arguments().immediate(); 1392 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1393 1394 // Check that the receiver isn't a smi. 1395 __ test(edx, Immediate(kSmiTagMask)); 1396 __ j(zero, &miss, not_taken); 1397 1398 // Do the right check and compute the holder register. 1399 Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi, 1400 name, &miss); 1401 1402 GenerateFastPropertyLoad(masm(), edi, reg, holder, index); 1403 1404 // Check that the function really is a function. 1405 __ test(edi, Immediate(kSmiTagMask)); 1406 __ j(zero, &miss, not_taken); 1407 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx); 1408 __ j(not_equal, &miss, not_taken); 1409 1410 // Patch the receiver on the stack with the global proxy if 1411 // necessary. 
1412 if (object->IsGlobalObject()) { 1413 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset)); 1414 __ mov(Operand(esp, (argc + 1) * kPointerSize), edx); 1415 } 1416 1417 // Invoke the function. 1418 __ InvokeFunction(edi, arguments(), JUMP_FUNCTION); 1419 1420 // Handle call cache miss. 1421 __ bind(&miss); 1422 Object* obj; 1423 { MaybeObject* maybe_obj = GenerateMissBranch(); 1424 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1425 } 1426 1427 // Return the generated code. 1428 return GetCode(FIELD, name); 1429} 1430 1431 1432MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, 1433 JSObject* holder, 1434 JSGlobalPropertyCell* cell, 1435 JSFunction* function, 1436 String* name) { 1437 // ----------- S t a t e ------------- 1438 // -- ecx : name 1439 // -- esp[0] : return address 1440 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1441 // -- ... 1442 // -- esp[(argc + 1) * 4] : receiver 1443 // ----------------------------------- 1444 1445 // If object is not an array, bail out to regular call. 1446 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); 1447 1448 Label miss; 1449 1450 GenerateNameCheck(name, &miss); 1451 1452 // Get the receiver from the stack. 1453 const int argc = arguments().immediate(); 1454 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1455 1456 // Check that the receiver isn't a smi. 1457 __ test(edx, Immediate(kSmiTagMask)); 1458 __ j(zero, &miss); 1459 1460 CheckPrototypes(JSObject::cast(object), edx, 1461 holder, ebx, 1462 eax, edi, name, &miss); 1463 1464 if (argc == 0) { 1465 // Noop, return the length. 1466 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); 1467 __ ret((argc + 1) * kPointerSize); 1468 } else { 1469 Label call_builtin; 1470 1471 // Get the elements array of the object. 1472 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); 1473 1474 // Check that the elements are in fast mode and writable. 
1475 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 1476 Immediate(Factory::fixed_array_map())); 1477 __ j(not_equal, &call_builtin); 1478 1479 if (argc == 1) { // Otherwise fall through to call builtin. 1480 Label exit, with_write_barrier, attempt_to_grow_elements; 1481 1482 // Get the array's length into eax and calculate new length. 1483 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); 1484 STATIC_ASSERT(kSmiTagSize == 1); 1485 STATIC_ASSERT(kSmiTag == 0); 1486 __ add(Operand(eax), Immediate(Smi::FromInt(argc))); 1487 1488 // Get the element's length into ecx. 1489 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset)); 1490 1491 // Check if we could survive without allocation. 1492 __ cmp(eax, Operand(ecx)); 1493 __ j(greater, &attempt_to_grow_elements); 1494 1495 // Save new length. 1496 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); 1497 1498 // Push the element. 1499 __ lea(edx, FieldOperand(ebx, 1500 eax, times_half_pointer_size, 1501 FixedArray::kHeaderSize - argc * kPointerSize)); 1502 __ mov(ecx, Operand(esp, argc * kPointerSize)); 1503 __ mov(Operand(edx, 0), ecx); 1504 1505 // Check if value is a smi. 1506 __ test(ecx, Immediate(kSmiTagMask)); 1507 __ j(not_zero, &with_write_barrier); 1508 1509 __ bind(&exit); 1510 __ ret((argc + 1) * kPointerSize); 1511 1512 __ bind(&with_write_barrier); 1513 1514 __ InNewSpace(ebx, ecx, equal, &exit); 1515 1516 __ RecordWriteHelper(ebx, edx, ecx); 1517 __ ret((argc + 1) * kPointerSize); 1518 1519 __ bind(&attempt_to_grow_elements); 1520 if (!FLAG_inline_new) { 1521 __ jmp(&call_builtin); 1522 } 1523 1524 ExternalReference new_space_allocation_top = 1525 ExternalReference::new_space_allocation_top_address(); 1526 ExternalReference new_space_allocation_limit = 1527 ExternalReference::new_space_allocation_limit_address(); 1528 1529 const int kAllocationDelta = 4; 1530 // Load top. 1531 __ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); 1532 1533 // Check if it's the end of elements. 
1534 __ lea(edx, FieldOperand(ebx, 1535 eax, times_half_pointer_size, 1536 FixedArray::kHeaderSize - argc * kPointerSize)); 1537 __ cmp(edx, Operand(ecx)); 1538 __ j(not_equal, &call_builtin); 1539 __ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize)); 1540 __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit)); 1541 __ j(above, &call_builtin); 1542 1543 // We fit and could grow elements. 1544 __ mov(Operand::StaticVariable(new_space_allocation_top), ecx); 1545 __ mov(ecx, Operand(esp, argc * kPointerSize)); 1546 1547 // Push the argument... 1548 __ mov(Operand(edx, 0), ecx); 1549 // ... and fill the rest with holes. 1550 for (int i = 1; i < kAllocationDelta; i++) { 1551 __ mov(Operand(edx, i * kPointerSize), 1552 Immediate(Factory::the_hole_value())); 1553 } 1554 1555 // Restore receiver to edx as finish sequence assumes it's here. 1556 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1557 1558 // Increment element's and array's sizes. 1559 __ add(FieldOperand(ebx, FixedArray::kLengthOffset), 1560 Immediate(Smi::FromInt(kAllocationDelta))); 1561 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); 1562 1563 // Elements are in new space, so write barrier is not required. 1564 __ ret((argc + 1) * kPointerSize); 1565 } 1566 1567 __ bind(&call_builtin); 1568 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), 1569 argc + 1, 1570 1); 1571 } 1572 1573 __ bind(&miss); 1574 Object* obj; 1575 { MaybeObject* maybe_obj = GenerateMissBranch(); 1576 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1577 } 1578 1579 // Return the generated code. 1580 return GetCode(function); 1581} 1582 1583 1584MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, 1585 JSObject* holder, 1586 JSGlobalPropertyCell* cell, 1587 JSFunction* function, 1588 String* name) { 1589 // ----------- S t a t e ------------- 1590 // -- ecx : name 1591 // -- esp[0] : return address 1592 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1593 // -- ... 
1594 // -- esp[(argc + 1) * 4] : receiver 1595 // ----------------------------------- 1596 1597 // If object is not an array, bail out to regular call. 1598 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); 1599 1600 Label miss, return_undefined, call_builtin; 1601 1602 GenerateNameCheck(name, &miss); 1603 1604 // Get the receiver from the stack. 1605 const int argc = arguments().immediate(); 1606 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1607 1608 // Check that the receiver isn't a smi. 1609 __ test(edx, Immediate(kSmiTagMask)); 1610 __ j(zero, &miss); 1611 CheckPrototypes(JSObject::cast(object), edx, 1612 holder, ebx, 1613 eax, edi, name, &miss); 1614 1615 // Get the elements array of the object. 1616 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); 1617 1618 // Check that the elements are in fast mode and writable. 1619 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 1620 Immediate(Factory::fixed_array_map())); 1621 __ j(not_equal, &call_builtin); 1622 1623 // Get the array's length into ecx and calculate new length. 1624 __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset)); 1625 __ sub(Operand(ecx), Immediate(Smi::FromInt(1))); 1626 __ j(negative, &return_undefined); 1627 1628 // Get the last element. 1629 STATIC_ASSERT(kSmiTagSize == 1); 1630 STATIC_ASSERT(kSmiTag == 0); 1631 __ mov(eax, FieldOperand(ebx, 1632 ecx, times_half_pointer_size, 1633 FixedArray::kHeaderSize)); 1634 __ cmp(Operand(eax), Immediate(Factory::the_hole_value())); 1635 __ j(equal, &call_builtin); 1636 1637 // Set the array's length. 1638 __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx); 1639 1640 // Fill with the hole. 
1641 __ mov(FieldOperand(ebx, 1642 ecx, times_half_pointer_size, 1643 FixedArray::kHeaderSize), 1644 Immediate(Factory::the_hole_value())); 1645 __ ret((argc + 1) * kPointerSize); 1646 1647 __ bind(&return_undefined); 1648 __ mov(eax, Immediate(Factory::undefined_value())); 1649 __ ret((argc + 1) * kPointerSize); 1650 1651 __ bind(&call_builtin); 1652 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop), 1653 argc + 1, 1654 1); 1655 1656 __ bind(&miss); 1657 Object* obj; 1658 { MaybeObject* maybe_obj = GenerateMissBranch(); 1659 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1660 } 1661 1662 // Return the generated code. 1663 return GetCode(function); 1664} 1665 1666 1667MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( 1668 Object* object, 1669 JSObject* holder, 1670 JSGlobalPropertyCell* cell, 1671 JSFunction* function, 1672 String* name) { 1673 // ----------- S t a t e ------------- 1674 // -- ecx : function name 1675 // -- esp[0] : return address 1676 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1677 // -- ... 1678 // -- esp[(argc + 1) * 4] : receiver 1679 // ----------------------------------- 1680 1681 // If object is not a string, bail out to regular call. 1682 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1683 1684 const int argc = arguments().immediate(); 1685 1686 Label miss; 1687 Label name_miss; 1688 Label index_out_of_range; 1689 Label* index_out_of_range_label = &index_out_of_range; 1690 1691 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { 1692 index_out_of_range_label = &miss; 1693 } 1694 1695 GenerateNameCheck(name, &name_miss); 1696 1697 // Check that the maps starting from the prototype haven't changed. 
1698 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1699 Context::STRING_FUNCTION_INDEX, 1700 eax, 1701 &miss); 1702 ASSERT(object != holder); 1703 CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder, 1704 ebx, edx, edi, name, &miss); 1705 1706 Register receiver = ebx; 1707 Register index = edi; 1708 Register scratch = edx; 1709 Register result = eax; 1710 __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize)); 1711 if (argc > 0) { 1712 __ mov(index, Operand(esp, (argc - 0) * kPointerSize)); 1713 } else { 1714 __ Set(index, Immediate(Factory::undefined_value())); 1715 } 1716 1717 StringCharCodeAtGenerator char_code_at_generator(receiver, 1718 index, 1719 scratch, 1720 result, 1721 &miss, // When not a string. 1722 &miss, // When not a number. 1723 index_out_of_range_label, 1724 STRING_INDEX_IS_NUMBER); 1725 char_code_at_generator.GenerateFast(masm()); 1726 __ ret((argc + 1) * kPointerSize); 1727 1728 StubRuntimeCallHelper call_helper; 1729 char_code_at_generator.GenerateSlow(masm(), call_helper); 1730 1731 if (index_out_of_range.is_linked()) { 1732 __ bind(&index_out_of_range); 1733 __ Set(eax, Immediate(Factory::nan_value())); 1734 __ ret((argc + 1) * kPointerSize); 1735 } 1736 1737 __ bind(&miss); 1738 // Restore function name in ecx. 1739 __ Set(ecx, Immediate(Handle<String>(name))); 1740 __ bind(&name_miss); 1741 Object* obj; 1742 { MaybeObject* maybe_obj = GenerateMissBranch(); 1743 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1744 } 1745 1746 // Return the generated code. 1747 return GetCode(function); 1748} 1749 1750 1751MaybeObject* CallStubCompiler::CompileStringCharAtCall( 1752 Object* object, 1753 JSObject* holder, 1754 JSGlobalPropertyCell* cell, 1755 JSFunction* function, 1756 String* name) { 1757 // ----------- S t a t e ------------- 1758 // -- ecx : function name 1759 // -- esp[0] : return address 1760 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1761 // -- ... 
1762 // -- esp[(argc + 1) * 4] : receiver 1763 // ----------------------------------- 1764 1765 // If object is not a string, bail out to regular call. 1766 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1767 1768 const int argc = arguments().immediate(); 1769 1770 Label miss; 1771 Label name_miss; 1772 Label index_out_of_range; 1773 Label* index_out_of_range_label = &index_out_of_range; 1774 1775 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { 1776 index_out_of_range_label = &miss; 1777 } 1778 1779 GenerateNameCheck(name, &name_miss); 1780 1781 // Check that the maps starting from the prototype haven't changed. 1782 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1783 Context::STRING_FUNCTION_INDEX, 1784 eax, 1785 &miss); 1786 ASSERT(object != holder); 1787 CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder, 1788 ebx, edx, edi, name, &miss); 1789 1790 Register receiver = eax; 1791 Register index = edi; 1792 Register scratch1 = ebx; 1793 Register scratch2 = edx; 1794 Register result = eax; 1795 __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize)); 1796 if (argc > 0) { 1797 __ mov(index, Operand(esp, (argc - 0) * kPointerSize)); 1798 } else { 1799 __ Set(index, Immediate(Factory::undefined_value())); 1800 } 1801 1802 StringCharAtGenerator char_at_generator(receiver, 1803 index, 1804 scratch1, 1805 scratch2, 1806 result, 1807 &miss, // When not a string. 1808 &miss, // When not a number. 1809 index_out_of_range_label, 1810 STRING_INDEX_IS_NUMBER); 1811 char_at_generator.GenerateFast(masm()); 1812 __ ret((argc + 1) * kPointerSize); 1813 1814 StubRuntimeCallHelper call_helper; 1815 char_at_generator.GenerateSlow(masm(), call_helper); 1816 1817 if (index_out_of_range.is_linked()) { 1818 __ bind(&index_out_of_range); 1819 __ Set(eax, Immediate(Factory::empty_string())); 1820 __ ret((argc + 1) * kPointerSize); 1821 } 1822 1823 __ bind(&miss); 1824 // Restore function name in ecx. 
  __ Set(ecx, Immediate(Handle<String>(name)));
  __ bind(&name_miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}


// Compiles a specialized call stub for String.fromCharCode called with
// exactly one argument.  The fast path handles a smi char code (masked to
// uint16); non-smi codes fall through to a full call of the function.
// Returns undefined_value() to signal "bail out to the regular compiler".
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: receiver map checks are enough.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global case: the function is cached in a property cell; verify the
    // cell still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = ebx;
  __ mov(code, Operand(esp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(code, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);

  // Convert the smi code to uint16.
  __ and_(code, Immediate(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, eax);
  char_from_code_generator.GenerateFast(masm());
  // Pop the argument and the receiver.
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


// Compiles a specialized call stub for Math.floor with one argument,
// using SSE2.  Smis are returned unchanged; positive heap numbers are
// rounded down (NaN, non-positive values and results that do not fit in
// a smi take the slow path or allocate a new heap number).
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(SSE2)) return Heap::undefined_value();
  CpuFeatures::Scope use_sse2(SSE2);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &smi);

  // Check if the argument is a heap number and load its value into xmm0.
  Label slow;
  __ CheckMap(eax, Factory::heap_number_map(), &slow, true);
  __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));

  // Check if the argument is strictly positive. Note this also
  // discards NaN.
  __ xorpd(xmm1, xmm1);
  __ ucomisd(xmm0, xmm1);
  __ j(below_equal, &slow);

  // Do a truncating conversion.
  __ cvttsd2si(eax, Operand(xmm0));

  // Check if the result fits into a smi. Note this also checks for
  // 0x80000000 which signals a failed conversion.
  Label wont_fit_into_smi;
  __ test(eax, Immediate(0xc0000000));
  __ j(not_zero, &wont_fit_into_smi);

  // Smi tag and return.
  __ SmiTag(eax);
  __ bind(&smi);
  // Pop the argument and the receiver.
  __ ret(2 * kPointerSize);

  // Check if the argument is < 2^kMantissaBits.
  Label already_round;
  __ bind(&wont_fit_into_smi);
  __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
  __ ucomisd(xmm0, xmm1);
  __ j(above_equal, &already_round);

  // Save a copy of the argument.
  __ movaps(xmm2, xmm0);

  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
  __ addsd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);

  // Compare the argument and the tentative result to get the right mask:
  //   if xmm2 < xmm0:
  //     xmm2 = 1...1
  //   else:
  //     xmm2 = 0...0
  __ cmpltsd(xmm2, xmm0);

  // Subtract 1 if the argument was less than the tentative result.
  __ LoadPowerOf2(xmm1, ebx, 0);
  __ andpd(xmm1, xmm2);
  __ subsd(xmm0, xmm1);

  // Return a new heap number.
  __ AllocateHeapNumber(eax, ebx, edx, &slow);
  __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  __ ret(2 * kPointerSize);

  // Return the argument (when it's an already round heap number).
  __ bind(&already_round);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


// Compiles a specialized call stub for Math.abs with one argument.  Smis
// are negated branch-free; heap numbers get their IEEE sign bit cleared
// (allocating a new number for negative inputs).  The most negative smi
// and non-number arguments take the slow path.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smi);

  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ mov(ebx, eax);
  __ sar(ebx, kBitsPerInt - 1);

  // Do bitwise not or do nothing depending on ebx.
  __ xor_(eax, Operand(ebx));

  // Add 1 or do nothing depending on ebx.
  __ sub(eax, Operand(ebx));

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its exponent and
  // sign into ebx.
  __ bind(&not_smi);
  __ CheckMap(eax, Factory::heap_number_map(), &slow, true);
  __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ test(ebx, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ and_(ebx, ~HeapNumber::kSignMask);
  __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
  __ AllocateHeapNumber(eax, edi, edx, &slow);
  __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
  __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


// Compiles a call stub for a constant function property.  First tries the
// custom built-in compilers above; otherwise emits a receiver type check
// chosen by 'check' (map / string / number / boolean) followed by either a
// fast API call (simple API calls with a known prototype depth) or a
// direct invoke of the function.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss_in_smi_check, not_taken);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1);

        // Allocate space for v8::Arguments implicit values. Must be
        // initialized before calling any runtime function.
        __ sub(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), edx, holder,
                      ebx, eax, edi, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a string or a symbol.
        __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
        __ j(above_equal, &miss, not_taken);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ test(edx, Immediate(kSmiTagMask));
        __ j(zero, &fast, taken);
        __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ cmp(edx, Factory::true_value());
        __ j(equal, &fast, taken);
        __ cmp(edx, Factory::false_value());
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    Failure* failure;
    // Move the return address on top of the stack.
    __ mov(eax, Operand(esp, 3 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), eax);

    // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
    // duplicate of return address and will be overwritten.
    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
    if (!success) {
      return failure;
    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    // Undo the stack space reserved for the v8::Arguments implicit values.
    __ add(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));
  }
  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}


// Compiles a call stub for a property found behind an interceptor.  The
// interceptor lookup is delegated to CallInterceptorCompiler; the result
// (in eax) is then checked to be a JSFunction before invoking it.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), ecx);
  Failure* failure;
  bool success = compiler.Compile(masm(),
                                  object,
                                  holder,
                                  name,
                                  &lookup,
                                  edx,
                                  ebx,
                                  edi,
                                  eax,
                                  &miss,
                                  &failure);
  if (!success) {
    return failure;
  }

  // Restore receiver.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the function really is a function.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss, not_taken);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ mov(edi, eax);
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);

  // Handle load cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a call stub for a function stored in a global property cell.
// After the receiver and cell checks the cached code is tail-called,
// either through the function's code field (Crankshaft) or directly.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Setup the context (function already in edi).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1);
  ASSERT(function->is_compiled());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
                  expected, arguments(), JUMP_FUNCTION);
  } else {
    Handle<Code> code(function->code());
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a named store stub for an in-object/backing-store field,
// optionally performing a map transition.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code. Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


// Compiles a named store stub that calls out to an AccessorInfo setter
// callback through the runtime (IC::kStoreCallbackProperty).
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(Immediate(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a named store stub for an interceptor property; delegates the
// actual store to the runtime (IC::kStoreInterceptorProperty).
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a store stub that writes straight into a global property cell
// after verifying the global object's map.
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);

  // Store the value in the cell.  The serializer cannot handle direct cell
  // operands, so address the cell through a register in that case.
  if (Serializer::enabled()) {
    __ mov(ecx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    __ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax);
  } else {
    __ mov(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)), eax);
  }

  // Return the value (register eax).
  __ IncrementCounter(&Counters::named_store_global_inline, 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a keyed store stub for a field; the key register must hold the
// expected property name, otherwise the stub misses.
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1);

  // Check that the name has not changed.
  __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  // Generate store field code. Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_store_field, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


// Compiles a keyed store stub specialized for a particular receiver map
// with fast (non-COW) elements: smi key, in-bounds index, write with
// write barrier.
MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
    JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map matches.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss, not_taken);

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &miss, not_taken);

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  __ j(not_equal, &miss, not_taken);

  // Check that the key is within bounds.
  if (receiver->IsJSArray()) {
    __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset));  // Compare smis.
    __ j(above_equal, &miss, not_taken);
  } else {
    __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));  // Compare smis.
    __ j(above_equal, &miss, not_taken);
  }

  // Do the store and update the write barrier. Make sure to preserve
  // the value in register eax.
  __ mov(edx, Operand(eax));
  __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax);
  __ RecordWrite(edi, 0, edx, ecx);

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}


// Compiles a load stub that proves a property does NOT exist anywhere on
// the prototype chain and returns undefined.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  ASSERT(last->IsGlobalObject() || last->HasFastProperties());

  // Check the maps of the full prototype chain. Also check that
  // global property cells up to (but not including) the last object
  // in the prototype chain are empty.
  CheckPrototypes(object, eax, last, ebx, edx, edi, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  edx,
                                                  &miss);
    if (cell->IsFailure()) {
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, Factory::undefined_value());
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NONEXISTENT, Heap::empty_string());
}


// Compiles a named load stub for a field at the given index.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, eax, ebx, edx, edi, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}


// Compiles a named load stub that invokes an AccessorInfo getter callback.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(object, holder, eax, ecx, ebx, edx, edi,
                                      callback, name, &miss, &failure);
  if (!success) {
    miss.Unuse();
    return failure;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a named load stub for a constant-function property.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, eax, ebx, edx, edi, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}


// Compiles a named load stub for an interceptor property.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // TODO(368): Compile in the whole chain: all the interceptors in
  // prototypes and ultimate answer.
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          eax,
                          ecx,
                          edx,
                          ebx,
                          edi,
                          name,
                          &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a load stub that reads a global property cell, with an
// optional hole check for deletable properties.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual loads. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);

  // Get the value from the cell.  The serializer cannot handle direct cell
  // operands, so address the cell through a register in that case.
  if (Serializer::enabled()) {
    __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
  } else {
    __ mov(ebx, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(ebx, Factory::the_hole_value());
    __ j(equal, &miss, not_taken);
  } else if (FLAG_debug_code) {
    __ cmp(ebx, Factory::the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  __ IncrementCounter(&Counters::named_load_global_stub, 1);
  __ mov(eax, ebx);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a keyed load stub for a field; misses unless the key equals
// the expected property name.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_field, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);

  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_field, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}


// Compiles a keyed load stub that invokes an AccessorInfo getter callback
// when the key matches the expected name.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_callback, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(receiver, holder, edx, eax, ebx, ecx, edi,
                                      callback, name, &miss, &failure);
  if (!success) {
    miss.Unuse();
    return failure;
  }

  __ bind(&miss);

  __ DecrementCounter(&Counters::keyed_load_callback, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a keyed load stub for a constant-function property keyed by
// the expected name.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_constant_function, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
                       value, name, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}


// Compiles a keyed load stub for an interceptor property keyed by the
// expected name.
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_interceptor, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          edx,
                          eax,
                          ecx,
                          ebx,
                          edi,
                          name,
                          &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a keyed load stub for the 'length' property of arrays keyed by
// the expected name.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_array_length, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadArrayLength(masm(), edx, ecx, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_array_length, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
3074 return GetCode(CALLBACKS, name); 3075} 3076 3077 3078MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 3079 // ----------- S t a t e ------------- 3080 // -- eax : key 3081 // -- edx : receiver 3082 // -- esp[0] : return address 3083 // ----------------------------------- 3084 Label miss; 3085 3086 __ IncrementCounter(&Counters::keyed_load_string_length, 1); 3087 3088 // Check that the name has not changed. 3089 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3090 __ j(not_equal, &miss, not_taken); 3091 3092 GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss); 3093 __ bind(&miss); 3094 __ DecrementCounter(&Counters::keyed_load_string_length, 1); 3095 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3096 3097 // Return the generated code. 3098 return GetCode(CALLBACKS, name); 3099} 3100 3101 3102MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { 3103 // ----------- S t a t e ------------- 3104 // -- eax : key 3105 // -- edx : receiver 3106 // -- esp[0] : return address 3107 // ----------------------------------- 3108 Label miss; 3109 3110 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1); 3111 3112 // Check that the name has not changed. 3113 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3114 __ j(not_equal, &miss, not_taken); 3115 3116 GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss); 3117 __ bind(&miss); 3118 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1); 3119 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3120 3121 // Return the generated code. 3122 return GetCode(CALLBACKS, name); 3123} 3124 3125 3126MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 3127 // ----------- S t a t e ------------- 3128 // -- eax : key 3129 // -- edx : receiver 3130 // -- esp[0] : return address 3131 // ----------------------------------- 3132 Label miss; 3133 3134 // Check that the receiver isn't a smi. 
3135 __ test(edx, Immediate(kSmiTagMask)); 3136 __ j(zero, &miss, not_taken); 3137 3138 // Check that the map matches. 3139 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 3140 Immediate(Handle<Map>(receiver->map()))); 3141 __ j(not_equal, &miss, not_taken); 3142 3143 // Check that the key is a smi. 3144 __ test(eax, Immediate(kSmiTagMask)); 3145 __ j(not_zero, &miss, not_taken); 3146 3147 // Get the elements array. 3148 __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset)); 3149 __ AssertFastElements(ecx); 3150 3151 // Check that the key is within bounds. 3152 __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset)); 3153 __ j(above_equal, &miss, not_taken); 3154 3155 // Load the result and make sure it's not the hole. 3156 __ mov(ebx, Operand(ecx, eax, times_2, 3157 FixedArray::kHeaderSize - kHeapObjectTag)); 3158 __ cmp(ebx, Factory::the_hole_value()); 3159 __ j(equal, &miss, not_taken); 3160 __ mov(eax, ebx); 3161 __ ret(0); 3162 3163 __ bind(&miss); 3164 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3165 3166 // Return the generated code. 3167 return GetCode(NORMAL, NULL); 3168} 3169 3170 3171// Specialized stub for constructing objects from functions which only have only 3172// simple assignments of the form this.x = ...; in their body. 3173MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 3174 // ----------- S t a t e ------------- 3175 // -- eax : argc 3176 // -- edi : constructor 3177 // -- esp[0] : return address 3178 // -- esp[4] : last argument 3179 // ----------------------------------- 3180 Label generic_stub_call; 3181#ifdef ENABLE_DEBUGGER_SUPPORT 3182 // Check to see whether there are any break points in the function code. If 3183 // there are jump to the generic constructor stub which calls the actual 3184 // code for the function thereby hitting the break points. 
3185 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 3186 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset)); 3187 __ cmp(ebx, Factory::undefined_value()); 3188 __ j(not_equal, &generic_stub_call, not_taken); 3189#endif 3190 3191 // Load the initial map and verify that it is in fact a map. 3192 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 3193 // Will both indicate a NULL and a Smi. 3194 __ test(ebx, Immediate(kSmiTagMask)); 3195 __ j(zero, &generic_stub_call); 3196 __ CmpObjectType(ebx, MAP_TYPE, ecx); 3197 __ j(not_equal, &generic_stub_call); 3198 3199#ifdef DEBUG 3200 // Cannot construct functions this way. 3201 // edi: constructor 3202 // ebx: initial map 3203 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE); 3204 __ Assert(not_equal, "Function constructed by construct stub."); 3205#endif 3206 3207 // Now allocate the JSObject on the heap by moving the new space allocation 3208 // top forward. 3209 // edi: constructor 3210 // ebx: initial map 3211 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset)); 3212 __ shl(ecx, kPointerSizeLog2); 3213 __ AllocateInNewSpace(ecx, 3214 edx, 3215 ecx, 3216 no_reg, 3217 &generic_stub_call, 3218 NO_ALLOCATION_FLAGS); 3219 3220 // Allocated the JSObject, now initialize the fields and add the heap tag. 3221 // ebx: initial map 3222 // edx: JSObject (untagged) 3223 __ mov(Operand(edx, JSObject::kMapOffset), ebx); 3224 __ mov(ebx, Factory::empty_fixed_array()); 3225 __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx); 3226 __ mov(Operand(edx, JSObject::kElementsOffset), ebx); 3227 3228 // Push the allocated object to the stack. This is the object that will be 3229 // returned (after it is tagged). 3230 __ push(edx); 3231 3232 // eax: argc 3233 // edx: JSObject (untagged) 3234 // Load the address of the first in-object property into edx. 3235 __ lea(edx, Operand(edx, JSObject::kHeaderSize)); 3236 // Calculate the location of the first argument. 
The stack contains the 3237 // allocated object and the return address on top of the argc arguments. 3238 __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize)); 3239 3240 // Use edi for holding undefined which is used in several places below. 3241 __ mov(edi, Factory::undefined_value()); 3242 3243 // eax: argc 3244 // ecx: first argument 3245 // edx: first in-object property of the JSObject 3246 // edi: undefined 3247 // Fill the initialized properties with a constant value or a passed argument 3248 // depending on the this.x = ...; assignment in the function. 3249 SharedFunctionInfo* shared = function->shared(); 3250 for (int i = 0; i < shared->this_property_assignments_count(); i++) { 3251 if (shared->IsThisPropertyAssignmentArgument(i)) { 3252 // Check if the argument assigned to the property is actually passed. 3253 // If argument is not passed the property is set to undefined, 3254 // otherwise find it on the stack. 3255 int arg_number = shared->GetThisPropertyAssignmentArgument(i); 3256 __ mov(ebx, edi); 3257 __ cmp(eax, arg_number); 3258 if (CpuFeatures::IsSupported(CMOV)) { 3259 CpuFeatures::Scope use_cmov(CMOV); 3260 __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize)); 3261 } else { 3262 Label not_passed; 3263 __ j(below_equal, ¬_passed); 3264 __ mov(ebx, Operand(ecx, arg_number * -kPointerSize)); 3265 __ bind(¬_passed); 3266 } 3267 // Store value in the property. 3268 __ mov(Operand(edx, i * kPointerSize), ebx); 3269 } else { 3270 // Set the property to the constant value. 3271 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i)); 3272 __ mov(Operand(edx, i * kPointerSize), Immediate(constant)); 3273 } 3274 } 3275 3276 // Fill the unused in-object property fields with undefined. 
3277 ASSERT(function->has_initial_map()); 3278 for (int i = shared->this_property_assignments_count(); 3279 i < function->initial_map()->inobject_properties(); 3280 i++) { 3281 __ mov(Operand(edx, i * kPointerSize), edi); 3282 } 3283 3284 // Move argc to ebx and retrieve and tag the JSObject to return. 3285 __ mov(ebx, eax); 3286 __ pop(eax); 3287 __ or_(Operand(eax), Immediate(kHeapObjectTag)); 3288 3289 // Remove caller arguments and receiver from the stack and return. 3290 __ pop(ecx); 3291 __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize)); 3292 __ push(ecx); 3293 __ IncrementCounter(&Counters::constructed_objects, 1); 3294 __ IncrementCounter(&Counters::constructed_objects_stub, 1); 3295 __ ret(0); 3296 3297 // Jump to the generic stub in case the specialized code cannot handle the 3298 // construction. 3299 __ bind(&generic_stub_call); 3300 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); 3301 Handle<Code> generic_construct_stub(code); 3302 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); 3303 3304 // Return the generated code. 3305 return GetCode(); 3306} 3307 3308 3309#undef __ 3310 3311} } // namespace v8::internal 3312 3313#endif // V8_TARGET_ARCH_IA32 3314