1// Copyright 2006-2009 the V8 project authors. All rights reserved. 2// Redistribution and use in source and binary forms, with or without 3// modification, are permitted provided that the following conditions are 4// met: 5// 6// * Redistributions of source code must retain the above copyright 7// notice, this list of conditions and the following disclaimer. 8// * Redistributions in binary form must reproduce the above 9// copyright notice, this list of conditions and the following 10// disclaimer in the documentation and/or other materials provided 11// with the distribution. 12// * Neither the name of Google Inc. nor the names of its 13// contributors may be used to endorse or promote products derived 14// from this software without specific prior written permission. 15// 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#include "ic-inl.h"
#include "codegen-inl.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


// Probe one table (primary or secondary) of the stub cache for an entry
// matching {name, flags} and, on a hit, tail-jump into the cached code
// object.  On a miss, control falls through after the local miss label.
// 'offset' holds the precomputed hash-table offset and is clobbered.
// When 'extra' is a valid register it holds the code entry; otherwise
// 'offset' is temporarily saved on the stack and reloaded, which costs
// extra memory traffic (hence the two code paths).
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
    __ j(not_equal, &miss, not_taken);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

    // Jump to the first instruction in the code stub.
    __ add(Operand(extra), Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(Operand(extra));

    __ bind(&miss);
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
    __ j(not_equal, &miss, not_taken);

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(Operand(offset), Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(Operand(offset));

    // Pop at miss.
    __ bind(&miss);
    __ pop(offset);
  }
}


// Probe the primary and then the secondary stub cache table for a code
// stub matching {receiver map, name, flags}.  On a hit the generated code
// tail-jumps into the stub and never returns here; on a miss it falls
// through so the caller can enter the runtime.  'scratch' is clobbered;
// 'extra' may be an invalid register (see ProbeTable).
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Get the map of the receiver and compute the hash.
  __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, flags);
  __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch, extra);

  // Primary miss: Compute hash for secondary probe.  The hash is
  // recomputed from scratch because ProbeTable clobbered 'scratch'.
  __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, flags);
  __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(scratch, Operand(name));
  __ add(Operand(scratch), Immediate(flags));
  __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch, extra);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}


// Load the prototype of one of the global functions (indexed by 'index'
// in the global context) into 'prototype'.  Only 'prototype' is written.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ mov(prototype, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ mov(prototype,
         FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ mov(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ mov(prototype,
         FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


// Generate code that returns the length of a JS array in eax, or jumps
// to 'miss_label' if 'receiver' is not a JS array.  'scratch' is
// clobbered by the type check.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss_label, not_taken);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label, not_taken);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
// Jumps to 'smi' if the object is a smi, and to 'non_string_object'
// if it is a heap object that is not a string.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, smi, not_taken);

  // Check that the object is a string.
  __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  ASSERT(kNotStringTag != 0);
  __ test(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object, not_taken);
}


// Generate code that returns the length of a string (or of the string
// wrapped by a JSValue) as a smi in eax, or jumps to 'miss' for any
// other receiver.  Both scratch registers are clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length from the string and convert to a smi.
  __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
  __ SmiTag(eax);
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, JS_VALUE_TYPE);
  __ j(not_equal, miss, not_taken);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
  __ SmiTag(eax);
  __ ret(0);
}


// Generate code that returns the receiver's function prototype in eax,
// or jumps to 'miss_label'.  Both scratch registers are clobbered by
// TryGetFunctionPrototype.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, Operand(scratch1));
  __ ret(0);
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
256void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 257 Register dst, Register src, 258 JSObject* holder, int index) { 259 // Adjust for the number of properties stored in the holder. 260 index -= holder->map()->inobject_properties(); 261 if (index < 0) { 262 // Get the property straight out of the holder. 263 int offset = holder->map()->instance_size() + (index * kPointerSize); 264 __ mov(dst, FieldOperand(src, offset)); 265 } else { 266 // Calculate the offset into the properties array. 267 int offset = index * kPointerSize + FixedArray::kHeaderSize; 268 __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset)); 269 __ mov(dst, FieldOperand(dst, offset)); 270 } 271} 272 273 274static void PushInterceptorArguments(MacroAssembler* masm, 275 Register receiver, 276 Register holder, 277 Register name, 278 JSObject* holder_obj) { 279 __ push(receiver); 280 __ push(holder); 281 __ push(name); 282 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); 283 ASSERT(!Heap::InNewSpace(interceptor)); 284 __ mov(receiver, Immediate(Handle<Object>(interceptor))); 285 __ push(receiver); 286 __ push(FieldOperand(receiver, InterceptorInfo::kDataOffset)); 287} 288 289 290static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, 291 Register receiver, 292 Register holder, 293 Register name, 294 JSObject* holder_obj) { 295 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 296 __ CallExternalReference( 297 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)), 298 5); 299} 300 301 302template <class Compiler> 303static void CompileLoadInterceptor(Compiler* compiler, 304 StubCompiler* stub_compiler, 305 MacroAssembler* masm, 306 JSObject* object, 307 JSObject* holder, 308 String* name, 309 LookupResult* lookup, 310 Register receiver, 311 Register scratch1, 312 Register scratch2, 313 Label* miss) { 314 ASSERT(holder->HasNamedInterceptor()); 315 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); 316 
317 // Check that the receiver isn't a smi. 318 __ test(receiver, Immediate(kSmiTagMask)); 319 __ j(zero, miss, not_taken); 320 321 // Check that the maps haven't changed. 322 Register reg = 323 stub_compiler->CheckPrototypes(object, receiver, holder, 324 scratch1, scratch2, name, miss); 325 326 if (lookup->IsProperty() && lookup->IsCacheable()) { 327 compiler->CompileCacheable(masm, 328 stub_compiler, 329 receiver, 330 reg, 331 scratch1, 332 scratch2, 333 holder, 334 lookup, 335 name, 336 miss); 337 } else { 338 compiler->CompileRegular(masm, 339 receiver, 340 reg, 341 scratch2, 342 holder, 343 miss); 344 } 345} 346 347 348class LoadInterceptorCompiler BASE_EMBEDDED { 349 public: 350 explicit LoadInterceptorCompiler(Register name) : name_(name) {} 351 352 void CompileCacheable(MacroAssembler* masm, 353 StubCompiler* stub_compiler, 354 Register receiver, 355 Register holder, 356 Register scratch1, 357 Register scratch2, 358 JSObject* holder_obj, 359 LookupResult* lookup, 360 String* name, 361 Label* miss_label) { 362 AccessorInfo* callback = NULL; 363 bool optimize = false; 364 // So far the most popular follow ups for interceptor loads are FIELD 365 // and CALLBACKS, so inline only them, other cases may be added 366 // later. 367 if (lookup->type() == FIELD) { 368 optimize = true; 369 } else if (lookup->type() == CALLBACKS) { 370 Object* callback_object = lookup->GetCallbackObject(); 371 if (callback_object->IsAccessorInfo()) { 372 callback = AccessorInfo::cast(callback_object); 373 optimize = callback->getter() != NULL; 374 } 375 } 376 377 if (!optimize) { 378 CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label); 379 return; 380 } 381 382 // Note: starting a frame here makes GC aware of pointers pushed below. 
383 __ EnterInternalFrame(); 384 385 if (lookup->type() == CALLBACKS) { 386 __ push(receiver); 387 } 388 __ push(holder); 389 __ push(name_); 390 391 CompileCallLoadPropertyWithInterceptor(masm, 392 receiver, 393 holder, 394 name_, 395 holder_obj); 396 397 Label interceptor_failed; 398 __ cmp(eax, Factory::no_interceptor_result_sentinel()); 399 __ j(equal, &interceptor_failed); 400 __ LeaveInternalFrame(); 401 __ ret(0); 402 403 __ bind(&interceptor_failed); 404 __ pop(name_); 405 __ pop(holder); 406 if (lookup->type() == CALLBACKS) { 407 __ pop(receiver); 408 } 409 410 __ LeaveInternalFrame(); 411 412 if (lookup->type() == FIELD) { 413 holder = stub_compiler->CheckPrototypes(holder_obj, holder, 414 lookup->holder(), scratch1, 415 scratch2, 416 name, 417 miss_label); 418 stub_compiler->GenerateFastPropertyLoad(masm, eax, 419 holder, lookup->holder(), 420 lookup->GetFieldIndex()); 421 __ ret(0); 422 } else { 423 ASSERT(lookup->type() == CALLBACKS); 424 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo()); 425 ASSERT(callback != NULL); 426 ASSERT(callback->getter() != NULL); 427 428 Label cleanup; 429 __ pop(scratch2); 430 __ push(receiver); 431 __ push(scratch2); 432 433 holder = stub_compiler->CheckPrototypes(holder_obj, holder, 434 lookup->holder(), scratch1, 435 scratch2, 436 name, 437 &cleanup); 438 439 __ pop(scratch2); // save old return address 440 __ push(holder); 441 __ mov(holder, Immediate(Handle<AccessorInfo>(callback))); 442 __ push(holder); 443 __ push(FieldOperand(holder, AccessorInfo::kDataOffset)); 444 __ push(name_); 445 __ push(scratch2); // restore old return address 446 447 ExternalReference ref = 448 ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); 449 __ TailCallRuntime(ref, 5, 1); 450 451 __ bind(&cleanup); 452 __ pop(scratch1); 453 __ pop(scratch2); 454 __ push(scratch1); 455 } 456 } 457 458 459 void CompileRegular(MacroAssembler* masm, 460 Register receiver, 461 Register holder, 462 Register scratch, 463 JSObject* holder_obj, 464 
Label* miss_label) { 465 __ pop(scratch); // save old return address 466 PushInterceptorArguments(masm, receiver, holder, name_, holder_obj); 467 __ push(scratch); // restore old return address 468 469 ExternalReference ref = ExternalReference( 470 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); 471 __ TailCallRuntime(ref, 5, 1); 472 } 473 474 private: 475 Register name_; 476}; 477 478 479// Holds information about possible function call optimizations. 480class CallOptimization BASE_EMBEDDED { 481 public: 482 explicit CallOptimization(LookupResult* lookup) 483 : constant_function_(NULL), 484 is_simple_api_call_(false), 485 expected_receiver_type_(NULL), 486 api_call_info_(NULL) { 487 if (!lookup->IsProperty() || !lookup->IsCacheable()) return; 488 489 // We only optimize constant function calls. 490 if (lookup->type() != CONSTANT_FUNCTION) return; 491 492 Initialize(lookup->GetConstantFunction()); 493 } 494 495 explicit CallOptimization(JSFunction* function) { 496 Initialize(function); 497 } 498 499 bool is_constant_call() const { 500 return constant_function_ != NULL; 501 } 502 503 JSFunction* constant_function() const { 504 ASSERT(constant_function_ != NULL); 505 return constant_function_; 506 } 507 508 bool is_simple_api_call() const { 509 return is_simple_api_call_; 510 } 511 512 FunctionTemplateInfo* expected_receiver_type() const { 513 ASSERT(is_simple_api_call_); 514 return expected_receiver_type_; 515 } 516 517 CallHandlerInfo* api_call_info() const { 518 ASSERT(is_simple_api_call_); 519 return api_call_info_; 520 } 521 522 // Returns the depth of the object having the expected type in the 523 // prototype chain between the two arguments. 
524 int GetPrototypeDepthOfExpectedType(JSObject* object, 525 JSObject* holder) const { 526 ASSERT(is_simple_api_call_); 527 if (expected_receiver_type_ == NULL) return 0; 528 int depth = 0; 529 while (object != holder) { 530 if (object->IsInstanceOf(expected_receiver_type_)) return depth; 531 object = JSObject::cast(object->GetPrototype()); 532 ++depth; 533 } 534 if (holder->IsInstanceOf(expected_receiver_type_)) return depth; 535 return kInvalidProtoDepth; 536 } 537 538 private: 539 void Initialize(JSFunction* function) { 540 if (!function->is_compiled()) return; 541 542 constant_function_ = function; 543 is_simple_api_call_ = false; 544 545 AnalyzePossibleApiFunction(function); 546 } 547 548 // Determines whether the given function can be called using the 549 // fast api call builtin. 550 void AnalyzePossibleApiFunction(JSFunction* function) { 551 SharedFunctionInfo* sfi = function->shared(); 552 if (sfi->function_data()->IsUndefined()) return; 553 FunctionTemplateInfo* info = 554 FunctionTemplateInfo::cast(sfi->function_data()); 555 556 // Require a C++ callback. 557 if (info->call_code()->IsUndefined()) return; 558 api_call_info_ = CallHandlerInfo::cast(info->call_code()); 559 560 // Accept signatures that either have no restrictions at all or 561 // only have restrictions on the receiver. 562 if (!info->signature()->IsUndefined()) { 563 SignatureInfo* signature = SignatureInfo::cast(info->signature()); 564 if (!signature->args()->IsUndefined()) return; 565 if (!signature->receiver()->IsUndefined()) { 566 expected_receiver_type_ = 567 FunctionTemplateInfo::cast(signature->receiver()); 568 } 569 } 570 571 is_simple_api_call_ = true; 572 } 573 574 JSFunction* constant_function_; 575 bool is_simple_api_call_; 576 FunctionTemplateInfo* expected_receiver_type_; 577 CallHandlerInfo* api_call_info_; 578}; 579 580 581// Reserves space for the extra arguments to FastHandleApiCall in the 582// caller's frame. 
583// 584// These arguments are set by CheckPrototypes and GenerateFastApiCall. 585static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 586 // ----------- S t a t e ------------- 587 // -- esp[0] : return address 588 // -- esp[4] : last argument in the internal frame of the caller 589 // ----------------------------------- 590 __ pop(scratch); 591 __ push(Immediate(Smi::FromInt(0))); 592 __ push(Immediate(Smi::FromInt(0))); 593 __ push(Immediate(Smi::FromInt(0))); 594 __ push(Immediate(Smi::FromInt(0))); 595 __ push(scratch); 596} 597 598 599// Undoes the effects of ReserveSpaceForFastApiCall. 600static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 601 // ----------- S t a t e ------------- 602 // -- esp[0] : return address 603 // -- esp[4] : last fast api call extra argument 604 // -- ... 605 // -- esp[16] : first fast api call extra argument 606 // -- esp[20] : last argument in the internal frame 607 // ----------------------------------- 608 __ pop(scratch); 609 __ add(Operand(esp), Immediate(kPointerSize * 4)); 610 __ push(scratch); 611} 612 613 614// Generates call to FastHandleApiCall builtin. 615static void GenerateFastApiCall(MacroAssembler* masm, 616 const CallOptimization& optimization, 617 int argc) { 618 // ----------- S t a t e ------------- 619 // -- esp[0] : return address 620 // -- esp[4] : object passing the type check 621 // (last fast api call extra argument, 622 // set by CheckPrototypes) 623 // -- esp[8] : api call data 624 // -- esp[12] : api callback 625 // -- esp[16] : api function 626 // (first fast api call extra argument) 627 // -- esp[20] : last argument 628 // -- ... 629 // -- esp[(argc + 5) * 4] : first argument 630 // -- esp[(argc + 6) * 4] : receiver 631 // ----------------------------------- 632 633 // Get the function and setup the context. 
634 JSFunction* function = optimization.constant_function(); 635 __ mov(edi, Immediate(Handle<JSFunction>(function))); 636 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 637 638 // Pass the additional arguments FastHandleApiCall expects. 639 __ mov(Operand(esp, 4 * kPointerSize), edi); 640 bool info_loaded = false; 641 Object* callback = optimization.api_call_info()->callback(); 642 if (Heap::InNewSpace(callback)) { 643 info_loaded = true; 644 __ mov(ecx, Handle<CallHandlerInfo>(optimization.api_call_info())); 645 __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kCallbackOffset)); 646 __ mov(Operand(esp, 3 * kPointerSize), ebx); 647 } else { 648 __ mov(Operand(esp, 3 * kPointerSize), Immediate(Handle<Object>(callback))); 649 } 650 Object* call_data = optimization.api_call_info()->data(); 651 if (Heap::InNewSpace(call_data)) { 652 if (!info_loaded) { 653 __ mov(ecx, Handle<CallHandlerInfo>(optimization.api_call_info())); 654 } 655 __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset)); 656 __ mov(Operand(esp, 2 * kPointerSize), ebx); 657 } else { 658 __ mov(Operand(esp, 2 * kPointerSize), 659 Immediate(Handle<Object>(call_data))); 660 } 661 662 // Set the number of arguments. 663 __ mov(eax, Immediate(argc + 4)); 664 665 // Jump to the fast api call builtin (tail call). 
666 Handle<Code> code = Handle<Code>( 667 Builtins::builtin(Builtins::FastHandleApiCall)); 668 ParameterCount expected(0); 669 __ InvokeCode(code, expected, expected, 670 RelocInfo::CODE_TARGET, JUMP_FUNCTION); 671} 672 673 674class CallInterceptorCompiler BASE_EMBEDDED { 675 public: 676 CallInterceptorCompiler(StubCompiler* stub_compiler, 677 const ParameterCount& arguments, 678 Register name) 679 : stub_compiler_(stub_compiler), 680 arguments_(arguments), 681 name_(name) {} 682 683 void Compile(MacroAssembler* masm, 684 JSObject* object, 685 JSObject* holder, 686 String* name, 687 LookupResult* lookup, 688 Register receiver, 689 Register scratch1, 690 Register scratch2, 691 Label* miss) { 692 ASSERT(holder->HasNamedInterceptor()); 693 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); 694 695 // Check that the receiver isn't a smi. 696 __ test(receiver, Immediate(kSmiTagMask)); 697 __ j(zero, miss, not_taken); 698 699 CallOptimization optimization(lookup); 700 701 if (optimization.is_constant_call() && 702 !Top::CanHaveSpecialFunctions(holder)) { 703 CompileCacheable(masm, 704 object, 705 receiver, 706 scratch1, 707 scratch2, 708 holder, 709 lookup, 710 name, 711 optimization, 712 miss); 713 } else { 714 CompileRegular(masm, 715 object, 716 receiver, 717 scratch1, 718 scratch2, 719 name, 720 holder, 721 miss); 722 } 723 } 724 725 private: 726 void CompileCacheable(MacroAssembler* masm, 727 JSObject* object, 728 Register receiver, 729 Register scratch1, 730 Register scratch2, 731 JSObject* holder_obj, 732 LookupResult* lookup, 733 String* name, 734 const CallOptimization& optimization, 735 Label* miss_label) { 736 ASSERT(optimization.is_constant_call()); 737 ASSERT(!lookup->holder()->IsGlobalObject()); 738 739 int depth1 = kInvalidProtoDepth; 740 int depth2 = kInvalidProtoDepth; 741 bool can_do_fast_api_call = false; 742 if (optimization.is_simple_api_call() && 743 !lookup->holder()->IsGlobalObject()) { 744 depth1 = 
optimization.GetPrototypeDepthOfExpectedType(object, holder_obj); 745 if (depth1 == kInvalidProtoDepth) { 746 depth2 = optimization.GetPrototypeDepthOfExpectedType(holder_obj, 747 lookup->holder()); 748 } 749 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || 750 (depth2 != kInvalidProtoDepth); 751 } 752 753 __ IncrementCounter(&Counters::call_const_interceptor, 1); 754 755 if (can_do_fast_api_call) { 756 __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1); 757 ReserveSpaceForFastApiCall(masm, scratch1); 758 } 759 760 Label miss_cleanup; 761 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; 762 Register holder = 763 stub_compiler_->CheckPrototypes(object, receiver, holder_obj, 764 scratch1, scratch2, name, 765 depth1, miss); 766 767 Label regular_invoke; 768 LoadWithInterceptor(masm, receiver, holder, holder_obj, ®ular_invoke); 769 770 // Generate code for the failed interceptor case. 771 772 // Check the lookup is still valid. 773 stub_compiler_->CheckPrototypes(holder_obj, receiver, 774 lookup->holder(), 775 scratch1, scratch2, name, 776 depth2, miss); 777 778 if (can_do_fast_api_call) { 779 GenerateFastApiCall(masm, optimization, arguments_.immediate()); 780 } else { 781 __ InvokeFunction(optimization.constant_function(), arguments_, 782 JUMP_FUNCTION); 783 } 784 785 if (can_do_fast_api_call) { 786 __ bind(&miss_cleanup); 787 FreeSpaceForFastApiCall(masm, scratch1); 788 __ jmp(miss_label); 789 } 790 791 __ bind(®ular_invoke); 792 if (can_do_fast_api_call) { 793 FreeSpaceForFastApiCall(masm, scratch1); 794 } 795 } 796 797 void CompileRegular(MacroAssembler* masm, 798 JSObject* object, 799 Register receiver, 800 Register scratch1, 801 Register scratch2, 802 String* name, 803 JSObject* holder_obj, 804 Label* miss_label) { 805 Register holder = 806 stub_compiler_->CheckPrototypes(object, receiver, holder_obj, 807 scratch1, scratch2, name, 808 miss_label); 809 810 __ EnterInternalFrame(); 811 // Save the name_ register across the 
call. 812 __ push(name_); 813 814 PushInterceptorArguments(masm, 815 receiver, 816 holder, 817 name_, 818 holder_obj); 819 820 __ CallExternalReference( 821 ExternalReference( 822 IC_Utility(IC::kLoadPropertyWithInterceptorForCall)), 823 5); 824 825 // Restore the name_ register. 826 __ pop(name_); 827 __ LeaveInternalFrame(); 828 } 829 830 void LoadWithInterceptor(MacroAssembler* masm, 831 Register receiver, 832 Register holder, 833 JSObject* holder_obj, 834 Label* interceptor_succeeded) { 835 __ EnterInternalFrame(); 836 __ push(holder); // Save the holder. 837 __ push(name_); // Save the name. 838 839 CompileCallLoadPropertyWithInterceptor(masm, 840 receiver, 841 holder, 842 name_, 843 holder_obj); 844 845 __ pop(name_); // Restore the name. 846 __ pop(receiver); // Restore the holder. 847 __ LeaveInternalFrame(); 848 849 __ cmp(eax, Factory::no_interceptor_result_sentinel()); 850 __ j(not_equal, interceptor_succeeded); 851 } 852 853 StubCompiler* stub_compiler_; 854 const ParameterCount& arguments_; 855 Register name_; 856}; 857 858 859void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { 860 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); 861 Code* code = NULL; 862 if (kind == Code::LOAD_IC) { 863 code = Builtins::builtin(Builtins::LoadIC_Miss); 864 } else { 865 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss); 866 } 867 868 Handle<Code> ic(code); 869 __ jmp(ic, RelocInfo::CODE_TARGET); 870} 871 872 873// Both name_reg and receiver_reg are preserved on jumps to miss_label, 874// but may be destroyed if store is successful. 875void StubCompiler::GenerateStoreField(MacroAssembler* masm, 876 JSObject* object, 877 int index, 878 Map* transition, 879 Register receiver_reg, 880 Register name_reg, 881 Register scratch, 882 Label* miss_label) { 883 // Check that the object isn't a smi. 
884 __ test(receiver_reg, Immediate(kSmiTagMask)); 885 __ j(zero, miss_label, not_taken); 886 887 // Check that the map of the object hasn't changed. 888 __ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset), 889 Immediate(Handle<Map>(object->map()))); 890 __ j(not_equal, miss_label, not_taken); 891 892 // Perform global security token check if needed. 893 if (object->IsJSGlobalProxy()) { 894 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label); 895 } 896 897 // Stub never generated for non-global objects that require access 898 // checks. 899 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 900 901 // Perform map transition for the receiver if necessary. 902 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) { 903 // The properties must be extended before we can store the value. 904 // We jump to a runtime call that extends the properties array. 905 __ pop(scratch); // Return address. 906 __ push(receiver_reg); 907 __ push(Immediate(Handle<Map>(transition))); 908 __ push(eax); 909 __ push(scratch); 910 __ TailCallRuntime( 911 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1); 912 return; 913 } 914 915 if (transition != NULL) { 916 // Update the map of the object; no write barrier updating is 917 // needed because the map is never in new space. 918 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), 919 Immediate(Handle<Map>(transition))); 920 } 921 922 // Adjust for the number of properties stored in the object. Even in the 923 // face of a transition we can use the old map here because the size of the 924 // object and the number of in-object properties is not going to change. 925 index -= object->map()->inobject_properties(); 926 927 if (index < 0) { 928 // Set the property straight into the object. 
929 int offset = object->map()->instance_size() + (index * kPointerSize); 930 __ mov(FieldOperand(receiver_reg, offset), eax); 931 932 // Update the write barrier for the array address. 933 // Pass the value being stored in the now unused name_reg. 934 __ mov(name_reg, Operand(eax)); 935 __ RecordWrite(receiver_reg, offset, name_reg, scratch); 936 } else { 937 // Write to the properties array. 938 int offset = index * kPointerSize + FixedArray::kHeaderSize; 939 // Get the properties array (optimistically). 940 __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 941 __ mov(FieldOperand(scratch, offset), eax); 942 943 // Update the write barrier for the array address. 944 // Pass the value being stored in the now unused name_reg. 945 __ mov(name_reg, Operand(eax)); 946 __ RecordWrite(scratch, offset, name_reg, receiver_reg); 947 } 948 949 // Return the value (register eax). 950 __ ret(0); 951} 952 953 954#undef __ 955#define __ ACCESS_MASM(masm()) 956 957 958Register StubCompiler::CheckPrototypes(JSObject* object, 959 Register object_reg, 960 JSObject* holder, 961 Register holder_reg, 962 Register scratch, 963 String* name, 964 int push_at_depth, 965 Label* miss) { 966 // Check that the maps haven't changed. 967 Register result = 968 masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, 969 push_at_depth, miss); 970 971 // If we've skipped any global objects, it's not enough to verify 972 // that their maps haven't changed. 
973 while (object != holder) { 974 if (object->IsGlobalObject()) { 975 GlobalObject* global = GlobalObject::cast(object); 976 Object* probe = global->EnsurePropertyCell(name); 977 if (probe->IsFailure()) { 978 set_failure(Failure::cast(probe)); 979 return result; 980 } 981 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); 982 ASSERT(cell->value()->IsTheHole()); 983 __ mov(scratch, Immediate(Handle<Object>(cell))); 984 __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), 985 Immediate(Factory::the_hole_value())); 986 __ j(not_equal, miss, not_taken); 987 } 988 object = JSObject::cast(object->GetPrototype()); 989 } 990 991 // Return the register containing the holder. 992 return result; 993} 994 995 996void StubCompiler::GenerateLoadField(JSObject* object, 997 JSObject* holder, 998 Register receiver, 999 Register scratch1, 1000 Register scratch2, 1001 int index, 1002 String* name, 1003 Label* miss) { 1004 // Check that the receiver isn't a smi. 1005 __ test(receiver, Immediate(kSmiTagMask)); 1006 __ j(zero, miss, not_taken); 1007 1008 // Check the prototype chain. 1009 Register reg = 1010 CheckPrototypes(object, receiver, holder, 1011 scratch1, scratch2, name, miss); 1012 1013 // Get the value from the properties. 1014 GenerateFastPropertyLoad(masm(), eax, reg, holder, index); 1015 __ ret(0); 1016} 1017 1018 1019bool StubCompiler::GenerateLoadCallback(JSObject* object, 1020 JSObject* holder, 1021 Register receiver, 1022 Register name_reg, 1023 Register scratch1, 1024 Register scratch2, 1025 AccessorInfo* callback, 1026 String* name, 1027 Label* miss, 1028 Failure** failure) { 1029 // Check that the receiver isn't a smi. 1030 __ test(receiver, Immediate(kSmiTagMask)); 1031 __ j(zero, miss, not_taken); 1032 1033 // Check that the maps haven't changed. 
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Pick a scratch register distinct from the holder register.
  Register other = reg.is(scratch1) ? scratch2 : scratch1;
  __ EnterInternalFrame();
  __ PushHandleScope(other);
  // Push the stack address where the list of arguments ends
  __ mov(other, esp);
  __ sub(Operand(other), Immediate(2 * kPointerSize));
  __ push(other);
  __ push(receiver);  // receiver
  __ push(reg);  // holder
  __ mov(other, Immediate(callback_handle));
  __ push(other);
  __ push(FieldOperand(other, AccessorInfo::kDataOffset));  // data
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const Arguments& to the C++ callback.
  __ mov(eax, esp);
  __ add(Operand(eax), Immediate(5 * kPointerSize));
  __ mov(ebx, esp);

  // Do call through the api.
  ASSERT_EQ(6, ApiGetterEntryStub::kStackSpace);
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ApiGetterEntryStub stub(callback_handle, &fun);
  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  Object* result = masm()->TryCallStub(&stub);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }

  // We need to avoid using eax since that now holds the result.
  Register tmp = other.is(eax) ? reg : other;
  // Emitting PopHandleScope may try to allocate.  Do not allow the
  // assembler to perform a garbage collection but instead return a
  // failure object.
  result = masm()->TryPopHandleScope(eax, tmp);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  __ LeaveInternalFrame();

  __ ret(0);
  return true;
}


// Emits code that loads a known constant |value|.  Only the receiver
// and prototype-chain checks are emitted; on success the constant is
// materialized directly into eax.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ test(receiver, Immediate(kSmiTagMask));
  __ j(zero, miss, not_taken);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, name, miss);

  // Return the constant value.
  __ mov(eax, Handle<Object>(value));
  __ ret(0);
}


// Emits code that loads a property through an interceptor.  The
// actual emission is delegated to the platform-independent
// CompileLoadInterceptor helper.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           String* name,
                                           Label* miss) {
  LoadInterceptorCompiler compiler(name_reg);
  CompileLoadInterceptor(&compiler,
                         this,
                         masm(),
                         object,
                         holder,
                         name,
                         lookup,
                         receiver,
                         scratch1,
                         scratch2,
                         miss);
}


// TODO(1241006): Avoid having lazy compile stubs specialized by the
// number of arguments. It is not needed anymore.
// Compiles a stub that calls the runtime to compile the function in
// edi and then tail-calls the resulting code object.
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push a copy of the function onto the stack.
  __ push(edi);

  __ push(edi);  // function is also the parameter to the runtime call
  __ CallRuntime(Runtime::kLazyCompile, 1);
  __ pop(edi);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  // eax holds the Code object returned by the runtime call; jump past
  // its header to the first instruction.
  __ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
  __ jmp(Operand(ecx));

  return GetCodeWithFlags(flags, "LazyCompileStub");
}


// Compiles a call IC stub for a receiver whose named property is a
// fast-case field expected to hold a function: loads the field,
// verifies it actually is a JSFunction, and invokes it.
Object* CallStubCompiler::CompileCallField(JSObject* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, edx, holder, ebx, eax, name, &miss);

  GenerateFastPropertyLoad(masm(), edi, reg, holder, index);

  // Check that the function really is a function.
  __ test(edi, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss, not_taken);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(FIELD, name);
}


// Compiles a call IC stub for a constant-function property.  |check|
// selects which receiver check to emit (receiver map check, string/
// number/boolean primitive check, or fast-elements array check).
// When the receiver check is a map check and the callee is a simple
// API call, a fast API call sequence may be emitted instead of a
// regular function invocation.
Object* CallStubCompiler::CompileCallConstant(Object* object,
                                              JSObject* holder,
                                              JSFunction* function,
                                              String* name,
                                              CheckType check) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  // depth stays kInvalidProtoDepth unless a fast API call is possible.
  int depth = kInvalidProtoDepth;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1);
        ReserveSpaceForFastApiCall(masm(), eax);
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), edx, holder,
                      ebx, eax, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a string or a symbol.
        __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
        __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
        __ cmp(eax, FIRST_NONSTRING_TYPE);
        __ j(above_equal, &miss, not_taken);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ test(edx, Immediate(kSmiTagMask));
        __ j(zero, &fast, taken);
        __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::NUMBER_FUNCTION_INDEX,
                                            eax);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ cmp(edx, Factory::true_value());
        __ j(equal, &fast, taken);
        __ cmp(edx, Factory::false_value());
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::BOOLEAN_FUNCTION_INDEX,
                                            eax);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, name, &miss);
      }
      break;
    }

    case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
      CheckPrototypes(JSObject::cast(object), edx, holder,
                      ebx, eax, name, &miss);
      // Make sure object->HasFastElements().
      // Get the elements array of the object.
      __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
      // Check that the object is in fast mode (not dictionary).
      __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
             Immediate(Factory::fixed_array_map()));
      __ j(not_equal, &miss, not_taken);
      break;

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    GenerateFastApiCall(masm(), optimization, argc);
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    // Undo the stack reservation made by ReserveSpaceForFastApiCall.
    FreeSpaceForFastApiCall(masm(), eax);
  }
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  String* function_name = NULL;
  if (function->shared()->name()->IsString()) {
    function_name = String::cast(function->shared()->name());
  }
  return GetCode(CONSTANT_FUNCTION, function_name);
}


// Compiles a call IC stub that resolves the callee through an
// interceptor.  The interceptor machinery (CallInterceptorCompiler)
// leaves the function to call in eax.
Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), ecx);
  compiler.Compile(masm(),
                   object,
                   holder,
                   name,
                   &lookup,
                   edx,
                   ebx,
                   edi,
                   &miss);

  // Restore receiver.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the function really is a function.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss, not_taken);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ mov(edi, eax);
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);

  // Handle load cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(argc);
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a call IC stub for a function stored in a global property
// cell.  Verifies that the cell still holds the expected function
// (or, for functions in new space, a closure with the same shared
// function info) before tail-calling the function's compiled code.
Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            JSFunction* function,
                                            String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, edx, holder, ebx, eax, name, &miss);

  // Get the value from the cell.
  __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
  __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ test(edi, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
    __ j(not_equal, &miss, not_taken);

    // Check the shared function info. Make sure it hasn't changed.
    __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
           Immediate(Handle<SharedFunctionInfo>(function->shared())));
    __ j(not_equal, &miss, not_taken);
  } else {
    __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
    __ j(not_equal, &miss, not_taken);
  }

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Setup the context (function already in edi).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a named store IC stub for a fast-case field write, with an
// optional map transition.
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code. Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  // GenerateStoreField trashed ecx; the miss handler expects the name
  // register to be intact, so restore it before jumping.
  __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


// Compiles a named store IC stub that dispatches to a native setter
// (AccessorInfo callback) via the StoreCallbackProperty runtime entry.
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                AccessorInfo* callback,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(Immediate(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallRuntime(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a named store IC stub that dispatches to a store
// interceptor via the StoreInterceptorProperty runtime entry.
Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallRuntime(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a named store IC stub that writes directly into a global
// property cell after checking the global object's map.
Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                              JSGlobalPropertyCell* cell,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);

  // Store the value in the cell.
  __ mov(ecx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
  __ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax);

  // Return the value (register eax).
  __ IncrementCounter(&Counters::named_store_global_inline, 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a keyed store IC stub for a fast-case field write.  The
// key is checked against the expected |name| before storing.
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- esp[0] : return address
  //  -- esp[4] : key
  //  -- esp[8] : receiver
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1);

  // Get the name from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  // Check that the name has not changed.
  __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  // Get the object from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Generate store field code. Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_store_field, 1);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}



// Compiles a named load IC stub for a fast-case field.
Object* LoadStubCompiler::CompileLoadField(JSObject* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, eax, ebx, edx, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}


// Compiles a named load IC stub that calls through a native getter.
// Returns the failure object if stub emission would require a GC.
Object* LoadStubCompiler::CompileLoadCallback(String* name,
                                              JSObject* object,
                                              JSObject* holder,
                                              AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
                                      callback, name, &miss, &failure);
  if (!success) return failure;

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a named load IC stub that returns a known constant value.
Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                              JSObject* holder,
                                              Object* value,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, eax, ebx, edx, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}


// Compiles a named load IC stub that loads through an interceptor.
Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // TODO(368): Compile in the whole chain: all the interceptors in
  // prototypes and ultimate answer.
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          eax,
                          ecx,
                          edx,
                          ebx,
                          name,
                          &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


// Compiles a named load IC stub that reads a global property cell.
// |is_dont_delete| properties skip the hole check because they cannot
// be deleted.
Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            String* name,
                                            bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- eax    : receiver
  //  -- ecx    : name
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual loads. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, eax, holder, ebx, edx, name, &miss);

  // Get the value from the cell.
  __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
  __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(ebx, Factory::the_hole_value());
    __ j(equal, &miss, not_taken);
  } else if (FLAG_debug_code) {
    __ cmp(ebx, Factory::the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  __ IncrementCounter(&Counters::named_load_global_inline, 1);
  __ mov(eax, ebx);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}


// Compiles a keyed load IC stub for a fast-case field.  The key is
// compared against the expected |name| before the load.
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                JSObject* receiver,
                                                JSObject* holder,
                                                int index) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_field, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadField(receiver, holder, edx, ebx, ecx, index, name, &miss);

  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_field, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}


// Compiles a keyed load IC stub that calls through a native getter.
// Returns the failure object if stub emission would require a GC.
Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_callback, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(receiver, holder, edx, eax, ebx, ecx,
                                      callback, name, &miss, &failure);
  if (!success) return failure;

  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_callback, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a keyed load IC stub that returns a known constant value.
Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   Object* value) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_constant_function, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadConstant(receiver, holder, edx, ebx, ecx,
                       value, name, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}


// Compiles a keyed load IC stub that loads through an interceptor.
Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_interceptor, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          edx,
                          eax,
                          ecx,
                          ebx,
                          name,
                          &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}




// Compiles a keyed load IC stub specialized to the "length" property
// of JSArrays.
Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_array_length, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadArrayLength(masm(), edx, ecx, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_array_length, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a keyed load IC stub specialized to the "length" property
// of strings.
Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_string_length, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_string_length, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Compiles a keyed load IC stub specialized to the "prototype"
// property of functions.
Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);

  // Check that the name has not changed.
  __ cmp(Operand(eax), Immediate(Handle<String>(name)));
  __ j(not_equal, &miss, not_taken);

  GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
  __ bind(&miss);
  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


// Specialized stub for constructing objects from functions which have
// only simple assignments of the form this.x = ...; in their body.
Object* ConstructStubCompiler::CompileConstructStub(
    SharedFunctionInfo* shared) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  // Any condition the fast path cannot handle (debugger attached, missing or
  // invalid initial map, allocation failure) bails out to the generic
  // construct stub via this label.
  Label generic_stub_call;
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(ebx, Factory::undefined_value());
  __ j(not_equal, &generic_stub_call, not_taken);
#endif

  // Load the initial map and verify that it is in fact a map.
  __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
  // Will both indicate a NULL and a Smi.
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(zero, &generic_stub_call);
  __ CmpObjectType(ebx, MAP_TYPE, ecx);
  __ j(not_equal, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // edi: constructor
  // ebx: initial map
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ Assert(not_equal, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject on the heap by moving the new space allocation
  // top forward.
  // edi: constructor
  // ebx: initial map
  // Instance size is stored in words; convert to bytes before allocating.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
  __ shl(ecx, kPointerSizeLog2);
  // On success edx holds the (untagged) allocation; on failure control
  // transfers to generic_stub_call.
  __ AllocateInNewSpace(ecx,
                        edx,
                        ecx,
                        no_reg,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields and add the heap tag.
  // ebx: initial map
  // edx: JSObject (untagged)
  __ mov(Operand(edx, JSObject::kMapOffset), ebx);
  __ mov(ebx, Factory::empty_fixed_array());
  __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx);
  __ mov(Operand(edx, JSObject::kElementsOffset), ebx);

  // Push the allocated object to the stack. This is the object that will be
  // returned (after it is tagged).
  __ push(edx);

  // eax: argc
  // edx: JSObject (untagged)
  // Load the address of the first in-object property into edx.
  __ lea(edx, Operand(edx, JSObject::kHeaderSize));
  // Calculate the location of the first argument. The stack contains the
  // allocated object and the return address on top of the argc arguments.
  __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize));

  // Use edi for holding undefined which is used in several places below.
  __ mov(edi, Factory::undefined_value());

  // eax: argc
  // ecx: first argument
  // edx: first in-object property of the JSObject
  // edi: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      // Check if the argument assigned to the property is actually passed.
      // If argument is not passed the property is set to undefined,
      // otherwise find it on the stack.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      // Default to undefined; conditionally overwrite with the argument when
      // argc > arg_number (i.e. the argument was actually passed).
      __ mov(ebx, edi);
      __ cmp(eax, arg_number);
      if (CpuFeatures::IsSupported(CMOV)) {
        CpuFeatures::Scope use_cmov(CMOV);
        // Branch-free load of the argument using CMOVcc. Arguments live at
        // negative offsets from ecx (ecx points at argument 0; later
        // arguments sit closer to the stack top — see the lea above).
        __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
      } else {
        // Fallback for CPUs without CMOV: same effect with a short branch.
        Label not_passed;
        __ j(below_equal, &not_passed);
        __ mov(ebx, Operand(ecx, arg_number * -kPointerSize));
        __ bind(&not_passed);
      }
      // Store value in the property.
      __ mov(Operand(edx, i * kPointerSize), ebx);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(Operand(edx, i * kPointerSize), Immediate(constant));
    }
  }

  // Fill the unused in-object property fields with undefined.
  for (int i = shared->this_property_assignments_count();
       i < shared->CalculateInObjectProperties();
       i++) {
    __ mov(Operand(edx, i * kPointerSize), edi);
  }

  // Move argc to ebx and retrieve and tag the JSObject to return.
  __ mov(ebx, eax);
  // The untagged object was pushed before the fill loops; tag it now.
  __ pop(eax);
  __ or_(Operand(eax), Immediate(kHeapObjectTag));

  // Remove caller arguments and receiver from the stack and return.
  // The extra 1 * kPointerSize skips the receiver slot below the arguments.
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
  __ push(ecx);
  __ IncrementCounter(&Counters::constructed_objects, 1);
  __ IncrementCounter(&Counters::constructed_objects_stub, 1);
  __ ret(0);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}


#undef __

} }  // namespace v8::internal