ic.cc revision 3ef787dbeca8a5fb1086949cda830dccee07bfbd
1// Copyright 2012 the V8 project authors. All rights reserved. 2// Redistribution and use in source and binary forms, with or without 3// modification, are permitted provided that the following conditions are 4// met: 5// 6// * Redistributions of source code must retain the above copyright 7// notice, this list of conditions and the following disclaimer. 8// * Redistributions in binary form must reproduce the above 9// copyright notice, this list of conditions and the following 10// disclaimer in the documentation and/or other materials provided 11// with the distribution. 12// * Neither the name of Google Inc. nor the names of its 13// contributors may be used to endorse or promote products derived 14// from this software without specific prior written permission. 15// 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#include "accessors.h"
#include "api.h"
#include "arguments.h"
#include "codegen.h"
#include "execution.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#ifdef DEBUG
// Maps an IC state to the single-character tag used when printing
// state transitions under --trace-ic (e.g. "0->1" for
// uninitialized -> monomorphic).
char IC::TransitionMarkFromState(IC::State state) {
  switch (state) {
    case UNINITIALIZED: return '0';
    case PREMONOMORPHIC: return 'P';
    case MONOMORPHIC: return '1';
    case MONOMORPHIC_PROTOTYPE_FAILURE: return '^';
    case MEGAMORPHIC: return IsGeneric() ? 'G' : 'N';

    // We never see the debugger states here, because the state is
    // computed from the original code - not the patched code. Let
    // these cases fall through to the unreachable code below.
    case DEBUG_BREAK: break;
    case DEBUG_PREPARE_STEP_IN: break;
  }
  UNREACHABLE();
  return 0;
}

// Prints one --trace-ic line describing this IC miss: the IC type, the
// JavaScript frame it occurred in (unwrapping a Function.prototype.apply
// trampoline frame if present), the old->new state transition, and the
// property name involved.
void IC::TraceIC(const char* type,
                 Handle<Object> name,
                 State old_state,
                 Code* new_target) {
  if (FLAG_trace_ic) {
    State new_state = StateFrom(new_target,
                                HEAP->undefined_value(),
                                HEAP->undefined_value());
    PrintF("[%s in ", type);
    // Walk down to the frame whose fp matches the one captured when this
    // IC object was constructed.
    StackFrameIterator it;
    while (it.frame()->fp() != this->fp()) it.Advance();
    StackFrame* raw_frame = it.frame();
    if (raw_frame->is_internal()) {
      // If we are sitting in the Function.apply builtin, report the frame
      // below it as the real call site.
      Isolate* isolate = new_target->GetIsolate();
      Code* apply_builtin = isolate->builtins()->builtin(
          Builtins::kFunctionApply);
      if (raw_frame->unchecked_code() == apply_builtin) {
        PrintF("apply from ");
        it.Advance();
        raw_frame = it.frame();
      }
    }
    JavaScriptFrame::PrintTop(stdout, false, true);
    bool new_can_grow =
        Code::GetKeyedAccessGrowMode(new_target->extra_ic_state()) ==
        ALLOW_JSARRAY_GROWTH;
    PrintF(" (%c->%c%s)",
           TransitionMarkFromState(old_state),
           TransitionMarkFromState(new_state),
           new_can_grow ? ".GROW" : "");
    name->Print();
    PrintF("]\n");
  }
}

#define TRACE_GENERIC_IC(type, reason) \
  do { \
    if (FLAG_trace_ic) { \
      PrintF("[%s patching generic stub in ", type); \
      JavaScriptFrame::PrintTop(stdout, false, true); \
      PrintF(" (%s)]\n", reason); \
    } \
  } while (false)

#else
#define TRACE_GENERIC_IC(type, reason)
#endif  // DEBUG

// In debug builds TraceIC is evaluated for its side effect only; the
// comma-expression makes the ASSERT condition always true.
#define TRACE_IC(type, name, old_state, new_target) \
  ASSERT((TraceIC(type, name, old_state, new_target), true))

// Captures the frame pointer and return-address slot of the JavaScript
// frame that triggered this IC miss, by manually unwinding past the
// C entry frame (and, for EXTRA_CALL_FRAME, one additional frame).
IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) {
  ASSERT(isolate == Isolate::Current());
  // To improve the performance of the (much used) IC code, we unfold
  // a few levels of the stack frame iteration code. This yields a
  // ~35% speedup when running DeltaBlue with the '--nouse-ic' flag.
  const Address entry =
      Isolate::c_entry_fp(isolate->thread_local_top());
  Address* pc_address =
      reinterpret_cast<Address*>(entry + ExitFrameConstants::kCallerPCOffset);
  Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
  // If there's another JavaScript frame on the stack, we need to look
  // one frame further down the stack to find the frame pointer and
  // the return address stack slot.
  if (depth == EXTRA_CALL_FRAME) {
    const int kCallerPCOffset = StandardFrameConstants::kCallerPCOffset;
    pc_address = reinterpret_cast<Address*>(fp + kCallerPCOffset);
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
#ifdef DEBUG
  // Cross-check the hand-unwound fp/pc against the generic frame iterator.
  StackFrameIterator it;
  for (int i = 0; i < depth + 1; i++) it.Advance();
  StackFrame* frame = it.frame();
  ASSERT(fp == frame->fp() && pc_address == frame->pc_address());
#endif
  fp_ = fp;
  pc_address_ = pc_address;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
// Translates this IC's call-site address from the debug-patched code
// object back into the corresponding address in the original (unpatched)
// code, which is where the inline cache system should look.
Address IC::OriginalCodeAddress() const {
  HandleScope scope;
  // Compute the JavaScript frame for the frame pointer of this IC
  // structure. We need this to be able to find the function
  // corresponding to the frame.
  StackFrameIterator it;
  while (it.frame()->fp() != this->fp()) it.Advance();
  JavaScriptFrame* frame = JavaScriptFrame::cast(it.frame());
  // Find the function on the stack and both the active code for the
  // function and the original code.
  JSFunction* function = JSFunction::cast(frame->function());
  Handle<SharedFunctionInfo> shared(function->shared());
  Code* code = shared->code();
  ASSERT(Debug::HasDebugInfo(shared));
  Code* original_code = Debug::GetDebugInfo(shared)->original_code();
  ASSERT(original_code->IsCode());
  // Get the address of the call site in the active code. This is the
  // place where the call to DebugBreakXXX is and where the IC
  // normally would be.
  Address addr = pc() - Assembler::kCallTargetAddressOffset;
  // Return the address in the original code. This is the place where
  // the call which has been overwritten by the DebugBreakXXX resides
  // and the place where the inline cache system should look.
  intptr_t delta =
      original_code->instruction_start() - code->instruction_start();
  return addr + delta;
}
#endif


// Returns true if any object on the prototype chain from |receiver| up to
// (but not including) the lookup holder is a JSObject with slow (dictionary)
// properties; such chains suppress IC optimization. Global objects and
// proxies are exempt even when normalized.
static bool HasNormalObjectsInPrototypeChain(Isolate* isolate,
                                             LookupResult* lookup,
                                             Object* receiver) {
  Object* end = lookup->IsProperty()
      ? lookup->holder() : Object::cast(isolate->heap()->null_value());
  for (Object* current = receiver;
       current != end;
       current = current->GetPrototype()) {
    if (current->IsJSObject() &&
        !JSObject::cast(current)->HasFastProperties() &&
        !current->IsJSGlobalProxy() &&
        !current->IsJSGlobalObject()) {
      return true;
    }
  }

  return false;
}


// If the monomorphic |target| stub failed because of a change somewhere in
// the receiver's prototype chain (rather than in the receiver itself),
// evicts it from the holder map's code cache and returns true.
static bool TryRemoveInvalidPrototypeDependentStub(Code* target,
                                                   Object* receiver,
                                                   Object* name) {
  InlineCacheHolderFlag cache_holder =
      Code::ExtractCacheHolderFromFlags(target->flags());

  if (cache_holder == OWN_MAP && !receiver->IsJSObject()) {
    // The stub was generated for JSObject but called for non-JSObject.
    // IC::GetCodeCacheHolder is not applicable.
    return false;
  } else if (cache_holder == PROTOTYPE_MAP &&
             receiver->GetPrototype()->IsNull()) {
    // IC::GetCodeCacheHolder is not applicable.
    return false;
  }
  Map* map = IC::GetCodeCacheHolder(receiver, cache_holder)->map();

  // Decide whether the inline cache failed because of changes to the
  // receiver itself or changes to one of its prototypes.
  //
  // If there are changes to the receiver itself, the map of the
  // receiver will have changed and the current target will not be in
  // the receiver map's code cache. Therefore, if the current target
  // is in the receiver map's code cache, the inline cache failed due
  // to prototype check failure.
  int index = map->IndexInCodeCache(name, target);
  if (index >= 0) {
    map->RemoveFromCodeCache(String::cast(name), target, index);
    return true;
  }

  return false;
}


// Computes the effective IC state for a miss, refining a raw MONOMORPHIC
// state into MONOMORPHIC_PROTOTYPE_FAILURE or UNINITIALIZED where the
// specific failure cause can be identified.
IC::State IC::StateFrom(Code* target, Object* receiver, Object* name) {
  IC::State state = target->ic_state();

  if (state != MONOMORPHIC || !name->IsString()) return state;
  if (receiver->IsUndefined() || receiver->IsNull()) return state;

  // For keyed load/store/call, the most likely cause of cache failure is
  // that the key has changed. We do not distinguish between
  // prototype and non-prototype failures for keyed access.
  Code::Kind kind = target->kind();
  if (kind == Code::KEYED_LOAD_IC ||
      kind == Code::KEYED_STORE_IC ||
      kind == Code::KEYED_CALL_IC) {
    return MONOMORPHIC;
  }

  // Remove the target from the code cache if it became invalid
  // because of changes in the prototype chain to avoid hitting it
  // again.
  // Call stubs handle this later to allow extra IC state
  // transitions.
  if (kind != Code::CALL_IC &&
      TryRemoveInvalidPrototypeDependentStub(target, receiver, name)) {
    return MONOMORPHIC_PROTOTYPE_FAILURE;
  }

  // The builtins object is special. It only changes when JavaScript
  // builtins are loaded lazily. It is important to keep inline
  // caches for the builtins object monomorphic. Therefore, if we get
  // an inline cache miss for the builtins object after lazily loading
  // JavaScript builtins, we return uninitialized as the state to
  // force the inline cache back to monomorphic state.
  if (receiver->IsJSBuiltinsObject()) {
    return UNINITIALIZED;
  }

  return MONOMORPHIC;
}


// Determines the relocation mode of the call site this IC patches by
// scanning the enclosing code object's code-target reloc entries for the
// one at the IC's address.
RelocInfo::Mode IC::ComputeMode() {
  Address addr = address();
  Code* code = Code::cast(isolate()->heap()->FindCodeObject(addr));
  for (RelocIterator it(code, RelocInfo::kCodeTargetMask);
       !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->pc() == addr) return info->rmode();
  }
  UNREACHABLE();
  return RelocInfo::NONE;
}


// Throws a TypeError of the given template with |key| and |object| as the
// message arguments; returns the failure sentinel for the caller to return.
Failure* IC::TypeError(const char* type,
                       Handle<Object> object,
                       Handle<Object> key) {
  HandleScope scope(isolate());
  Handle<Object> args[2] = { key, object };
  Handle<Object> error = isolate()->factory()->NewTypeError(
      type, HandleVector(args, 2));
  return isolate()->Throw(*error);
}


// Throws a ReferenceError of the given template for |name|; returns the
// failure sentinel for the caller to return.
Failure* IC::ReferenceError(const char* type, Handle<String> name) {
  HandleScope scope(isolate());
  Handle<Object> error = isolate()->factory()->NewReferenceError(
      type, HandleVector(&name, 1));
  return isolate()->Throw(*error);
}


// Bookkeeping after an IC stub has been patched: updates the host code
// object's with-type-info counter for the optimizer's type-info threshold,
// and (under --watch-ic-patching) resets profiler ticks on the functions
// near the top of the stack to postpone their optimization.
void IC::PostPatching(Address address, Code* target, Code* old_target) {
  if (FLAG_type_info_threshold > 0) {
    if (old_target->is_inline_cache_stub() &&
        target->is_inline_cache_stub()) {
      State old_state = old_target->ic_state();
      State new_state = target->ic_state();
      bool was_uninitialized =
          old_state == UNINITIALIZED || old_state == PREMONOMORPHIC;
      bool is_uninitialized =
          new_state == UNINITIALIZED || new_state == PREMONOMORPHIC;
      int delta = 0;
      if (was_uninitialized && !is_uninitialized) {
        delta = 1;
      } else if (!was_uninitialized && is_uninitialized) {
        delta = -1;
      }
      if (delta != 0) {
        Code* host = target->GetHeap()->isolate()->
            inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
        // Not all Code objects have TypeFeedbackInfo.
        if (host->type_feedback_info()->IsTypeFeedbackInfo()) {
          TypeFeedbackInfo* info =
              TypeFeedbackInfo::cast(host->type_feedback_info());
          info->set_ic_with_typeinfo_count(
              info->ic_with_typeinfo_count() + delta);
        }
      }
    }
  }
  if (FLAG_watch_ic_patching) {
    Isolate::Current()->runtime_profiler()->NotifyICChanged();
    // We do not want to optimize until the ICs have settled down,
    // so when they are patched, we postpone optimization for the
    // current function and the functions above it on the stack that
    // might want to inline this one.
    StackFrameIterator it;
    if (it.done()) return;
    it.Advance();
    static const int kStackFramesToMark = Compiler::kMaxInliningLevels - 1;
    for (int i = 0; i < kStackFramesToMark; ++i) {
      if (it.done()) return;
      StackFrame* raw_frame = it.frame();
      if (raw_frame->is_java_script()) {
        JSFunction* function =
            JSFunction::cast(JavaScriptFrame::cast(raw_frame)->function());
        if (function->IsOptimized()) continue;
        SharedFunctionInfo* shared = function->shared();
        shared->set_profiler_ticks(0);
      }
      it.Advance();
    }
  }
}


// Resets the IC at |address| back to its initial stub, dispatching on the
// kind of the currently installed target.
void IC::Clear(Address address) {
  Code* target = GetTargetAtAddress(address);

  // Don't clear debug break inline cache as it will remove the break point.
  if (target->ic_state() == DEBUG_BREAK) return;

  switch (target->kind()) {
    case Code::LOAD_IC: return LoadIC::Clear(address, target);
    case Code::KEYED_LOAD_IC:
      return KeyedLoadIC::Clear(address, target);
    case Code::STORE_IC: return StoreIC::Clear(address, target);
    case Code::KEYED_STORE_IC:
      return KeyedStoreIC::Clear(address, target);
    case Code::CALL_IC: return CallIC::Clear(address, target);
    case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
    case Code::UNARY_OP_IC:
    case Code::BINARY_OP_IC:
    case Code::COMPARE_IC:
    case Code::TO_BOOLEAN_IC:
      // Clearing these is tricky and does not
      // make any performance difference.
      return;
    default: UNREACHABLE();
  }
}


// Resets a (keyed) call IC to the matching initialize stub, preserving the
// argument count, contextual flag, and IC kind encoded in the old target.
void CallICBase::Clear(Address address, Code* target) {
  bool contextual = CallICBase::Contextual::decode(target->extra_ic_state());
  State state = target->ic_state();
  if (state == UNINITIALIZED) return;
  Code* code =
      Isolate::Current()->stub_cache()->FindCallInitialize(
          target->arguments_count(),
          contextual ? RelocInfo::CODE_TARGET_CONTEXT : RelocInfo::CODE_TARGET,
          target->kind());
  SetTargetAtAddress(address, code);
}


// Resets a keyed load IC to the initialize stub.
void KeyedLoadIC::Clear(Address address, Code* target) {
  if (target->ic_state() == UNINITIALIZED) return;
  // Make sure to also clear the map used in inline fast cases. If we
  // do not clear these maps, cached code can keep objects alive
  // through the embedded maps.
  SetTargetAtAddress(address, initialize_stub());
}


// Resets a load IC to the initialize stub.
void LoadIC::Clear(Address address, Code* target) {
  if (target->ic_state() == UNINITIALIZED) return;
  SetTargetAtAddress(address, initialize_stub());
}


// Resets a store IC to the initialize stub matching the target's
// strict/non-strict mode.
void StoreIC::Clear(Address address, Code* target) {
  if (target->ic_state() == UNINITIALIZED) return;
  SetTargetAtAddress(address,
      (Code::GetStrictMode(target->extra_ic_state()) == kStrictMode)
        ? initialize_stub_strict()
        : initialize_stub());
}


// Resets a keyed store IC to the initialize stub matching the target's
// strict/non-strict mode.
void KeyedStoreIC::Clear(Address address, Code* target) {
  if (target->ic_state() == UNINITIALIZED) return;
  SetTargetAtAddress(address,
      (Code::GetStrictMode(target->extra_ic_state()) == kStrictMode)
        ? initialize_stub_strict()
        : initialize_stub());
}


// Returns true if |object|'s named interceptor has an actual getter
// callback (as opposed to an undefined placeholder).
static bool HasInterceptorGetter(JSObject* object) {
  return !object->GetNamedInterceptor()->getter()->IsUndefined();
}


// Performs a prototype-chain lookup of |name| for a load, transparently
// skipping objects whose named interceptor has no real getter. Leaves the
// final result (possibly NotFound) in |lookup|.
static void LookupForRead(Handle<Object> object,
                          Handle<String> name,
                          LookupResult* lookup) {
  // Skip all the objects with named interceptors, but
  // without actual getter.
  while (true) {
    object->Lookup(*name, lookup);
    // Besides normal conditions (property not found or it's not
    // an interceptor), bail out if lookup is not cacheable: we won't
    // be able to IC it anyway and regular lookup should work fine.
    if (!lookup->IsFound()
        || (lookup->type() != INTERCEPTOR)
        || !lookup->IsCacheable()) {
      return;
    }

    Handle<JSObject> holder(lookup->holder());
    if (HasInterceptorGetter(*holder)) {
      return;
    }

    // The interceptor has no getter: look behind it for a real property
    // on the same holder.
    holder->LocalLookupRealNamedProperty(*name, lookup);
    if (lookup->IsProperty()) {
      ASSERT(lookup->type() != INTERCEPTOR);
      return;
    }

    // Nothing on this holder; continue the lookup on its prototype.
    Handle<Object> proto(holder->GetPrototype());
    if (proto->IsNull()) {
      lookup->NotFound();
      return;
    }

    object = proto;
  }
}


// Resolves a non-function callee via its function delegate. If a delegate
// exists, patches the receiver slot in the calling frame with |object| so
// the delegate is invoked with the object as receiver. Returns the
// delegate (which may not be a JSFunction; callers re-check).
Handle<Object> CallICBase::TryCallAsFunction(Handle<Object> object) {
  Handle<Object> delegate = Execution::GetFunctionDelegate(object);

  if (delegate->IsJSFunction() && !object->IsJSFunctionProxy()) {
    // Patch the receiver and use the delegate as the function to
    // invoke. This is used for invoking objects as if they were functions.
    const int argc = target()->arguments_count();
    StackFrameLocator locator;
    JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
    int index = frame->ComputeExpressionsCount() - (argc + 1);
    frame->SetExpression(index, *object);
  }

  return delegate;
}


// Implements the implicit ToObject wrapping of primitive receivers for
// calls to classic-mode, non-builtin functions: replaces the receiver slot
// in the calling frame with the wrapper object.
void CallICBase::ReceiverToObjectIfRequired(Handle<Object> callee,
                                            Handle<Object> object) {
  // Resolve proxies down to the underlying call trap.
  while (callee->IsJSFunctionProxy()) {
    callee = Handle<Object>(JSFunctionProxy::cast(*callee)->call_trap());
  }

  if (callee->IsJSFunction()) {
    Handle<JSFunction> function = Handle<JSFunction>::cast(callee);
    if (!function->shared()->is_classic_mode() || function->IsBuiltin()) {
      // Do not wrap receiver for strict mode functions or for builtins.
      return;
    }
  }

  // And only wrap string, number or boolean.
  if (object->IsString() || object->IsNumber() || object->IsBoolean()) {
    // Change the receiver to the result of calling ToObject on it.
    const int argc = this->target()->arguments_count();
    StackFrameLocator locator;
    JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
    int index = frame->ComputeExpressionsCount() - (argc + 1);
    frame->SetExpression(index, *isolate()->factory()->ToObject(object));
  }
}


// Miss handler for named calls: loads the callable property |name| from
// |object|, updates the IC caches, wraps the receiver if required, and
// returns the function to invoke (or throws Type/ReferenceError).
MaybeObject* CallICBase::LoadFunction(State state,
                                      Code::ExtraICState extra_ic_state,
                                      Handle<Object> object,
                                      Handle<String> name) {
  // If the object is undefined or null it's illegal to try to get any
  // of its properties; throw a TypeError in that case.
  if (object->IsUndefined() || object->IsNull()) {
    return TypeError("non_object_property_call", object, name);
  }

  // Check if the name is trivially convertible to an index and get
  // the element if so.
  uint32_t index;
  if (name->AsArrayIndex(&index)) {
    Handle<Object> result = Object::GetElement(object, index);
    RETURN_IF_EMPTY_HANDLE(isolate(), result);
    if (result->IsJSFunction()) return *result;

    // Try to find a suitable function delegate for the object at hand.
    result = TryCallAsFunction(result);
    if (result->IsJSFunction()) return *result;

    // Otherwise, it will fail in the lookup step.
  }

  // Lookup the property in the object.
  LookupResult lookup(isolate());
  LookupForRead(object, name, &lookup);

  if (!lookup.IsProperty()) {
    // If the object does not have the requested property, check which
    // exception we need to throw.
    return IsContextual(object)
        ? ReferenceError("not_defined", name)
        : TypeError("undefined_method", object, name);
  }

  // Lookup is valid: Update inline cache and stub cache.
  if (FLAG_use_ic) {
    UpdateCaches(&lookup, state, extra_ic_state, object, name);
  }

  // Get the property.
  PropertyAttributes attr;
  Handle<Object> result =
      Object::GetProperty(object, object, &lookup, name, &attr);
  RETURN_IF_EMPTY_HANDLE(isolate(), result);

  if (lookup.type() == INTERCEPTOR && attr == ABSENT) {
    // If the object does not have the requested property, check which
    // exception we need to throw.
    return IsContextual(object)
        ? ReferenceError("not_defined", name)
        : TypeError("undefined_method", object, name);
  }

  ASSERT(!result->IsTheHole());

  // Make receiver an object if the callee requires it. Strict mode or builtin
  // functions do not wrap the receiver, non-strict functions and objects
  // called as functions do.
  ReceiverToObjectIfRequired(result, object);

  if (result->IsJSFunction()) {
    Handle<JSFunction> function = Handle<JSFunction>::cast(result);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Handle stepping into a function if step into is active.
    Debug* debug = isolate()->debug();
    if (debug->StepInActive()) {
      // Protect the result in a handle as the debugger can allocate and might
      // cause GC.
      debug->HandleStepIn(function, object, fp(), false);
    }
#endif
    return *function;
  }

  // Try to find a suitable function delegate for the object at hand.
  result = TryCallAsFunction(result);
  if (result->IsJSFunction()) return *result;

  return TypeError("property_not_function", object, name);
}


// For calls to certain builtins (String charCodeAt/charAt), records an
// out-of-bounds index in the extra IC state so a specialized stub can be
// used. Returns true only when the state was actually updated.
bool CallICBase::TryUpdateExtraICState(LookupResult* lookup,
                                       Handle<Object> object,
                                       Code::ExtraICState* extra_ic_state) {
  ASSERT(kind_ == Code::CALL_IC);
  if (lookup->type() != CONSTANT_FUNCTION) return false;
  JSFunction* function = lookup->GetConstantFunction();
  if (!function->shared()->HasBuiltinFunctionId()) return false;

  // Fetch the arguments passed to the called function.
  const int argc = target()->arguments_count();
  Address entry = isolate()->c_entry_fp(isolate()->thread_local_top());
  Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
  Arguments args(argc + 1,
                 &Memory::Object_at(fp +
                                    StandardFrameConstants::kCallerSPOffset +
                                    argc * kPointerSize));
  switch (function->shared()->builtin_function_id()) {
    case kStringCharCodeAt:
    case kStringCharAt:
      if (object->IsString()) {
        String* string = String::cast(*object);
        // Check there's the right string value or wrapper in the receiver slot.
        ASSERT(string == args[0] || string == JSValue::cast(args[0])->value());
        // If we're in the default (fastest) state and the index is
        // out of bounds, update the state to record this fact.
        if (StringStubState::decode(*extra_ic_state) == DEFAULT_STRING_STUB &&
            argc >= 1 && args[1]->IsNumber()) {
          double index = DoubleToInteger(args.number_at(1));
          if (index < 0 || index >= string->length()) {
            *extra_ic_state =
                StringStubState::update(*extra_ic_state,
                                        STRING_INDEX_OUT_OF_BOUNDS);
            return true;
          }
        }
      }
      break;
    default:
      return false;
  }
  return false;
}


// Computes (or fetches from the stub cache) the monomorphic call stub
// matching the lookup result's property type. Returns a null handle when
// no suitable stub exists, in which case the IC is left unpatched.
Handle<Code> CallICBase::ComputeMonomorphicStub(LookupResult* lookup,
                                                State state,
                                                Code::ExtraICState extra_state,
                                                Handle<Object> object,
                                                Handle<String> name) {
  int argc = target()->arguments_count();
  Handle<JSObject> holder(lookup->holder());
  switch (lookup->type()) {
    case FIELD: {
      int index = lookup->GetFieldIndex();
      return isolate()->stub_cache()->ComputeCallField(
          argc, kind_, extra_state, name, object, holder, index);
    }
    case CONSTANT_FUNCTION: {
      // Get the constant function and compute the code stub for this
      // call; used for rewriting to monomorphic state and making sure
      // that the code stub is in the stub cache.
      Handle<JSFunction> function(lookup->GetConstantFunction());
      return isolate()->stub_cache()->ComputeCallConstant(
          argc, kind_, extra_state, name, object, holder, function);
    }
    case NORMAL: {
      // If we return a null handle, the IC will not be patched.
      if (!object->IsJSObject()) return Handle<Code>::null();
      Handle<JSObject> receiver = Handle<JSObject>::cast(object);

      if (holder->IsGlobalObject()) {
        Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder);
        Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
        if (!cell->value()->IsJSFunction()) return Handle<Code>::null();
        Handle<JSFunction> function(JSFunction::cast(cell->value()));
        return isolate()->stub_cache()->ComputeCallGlobal(
            argc, kind_, extra_state, name, receiver, global, cell, function);
      } else {
        // There is only one shared stub for calling normalized
        // properties. It does not traverse the prototype chain, so the
        // property must be found in the receiver for the stub to be
        // applicable.
        if (!holder.is_identical_to(receiver)) return Handle<Code>::null();
        return isolate()->stub_cache()->ComputeCallNormal(
            argc, kind_, extra_state);
      }
      break;
    }
    case INTERCEPTOR:
      ASSERT(HasInterceptorGetter(*holder));
      return isolate()->stub_cache()->ComputeCallInterceptor(
          argc, kind_, extra_state, name, object, holder);
    default:
      return Handle<Code>::null();
  }
}


// Drives the call IC state machine after a successful lookup: selects the
// next stub (premonomorphic, monomorphic, or megamorphic), patches the call
// site or updates the stub cache accordingly, and traces the transition.
void CallICBase::UpdateCaches(LookupResult* lookup,
                              State state,
                              Code::ExtraICState extra_ic_state,
                              Handle<Object> object,
                              Handle<String> name) {
  // Bail out if we didn't find a result.
  if (!lookup->IsProperty() || !lookup->IsCacheable()) return;

  if (lookup->holder() != *object &&
      HasNormalObjectsInPrototypeChain(
          isolate(), lookup, object->GetPrototype())) {
    // Suppress optimization for prototype chains with slow properties objects
    // in the middle.
    return;
  }

  // Compute the number of arguments.
  int argc = target()->arguments_count();
  bool had_proto_failure = false;
  Handle<Code> code;
  if (state == UNINITIALIZED) {
    // This is the first time we execute this inline cache.
    // Set the target to the pre monomorphic stub to delay
    // setting the monomorphic state.
    code = isolate()->stub_cache()->ComputeCallPreMonomorphic(
        argc, kind_, extra_ic_state);
  } else if (state == MONOMORPHIC) {
    if (kind_ == Code::CALL_IC &&
        TryUpdateExtraICState(lookup, object, &extra_ic_state)) {
      code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
                                    object, name);
    } else if (kind_ == Code::CALL_IC &&
               TryRemoveInvalidPrototypeDependentStub(target(),
                                                      *object,
                                                      *name)) {
      had_proto_failure = true;
      code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
                                    object, name);
    } else {
      code = isolate()->stub_cache()->ComputeCallMegamorphic(
          argc, kind_, extra_ic_state);
    }
  } else {
    code = ComputeMonomorphicStub(lookup, state, extra_ic_state,
                                  object, name);
  }

  // If there's no appropriate stub we simply avoid updating the caches.
  if (code.is_null()) return;

  // Patch the call site depending on the state of the cache.
  if (state == UNINITIALIZED ||
      state == PREMONOMORPHIC ||
      state == MONOMORPHIC ||
      state == MONOMORPHIC_PROTOTYPE_FAILURE) {
    set_target(*code);
  } else if (state == MEGAMORPHIC) {
    // Cache code holding map should be consistent with
    // GenerateMonomorphicCacheProbe. It is not the map which holds the stub.
    Handle<JSObject> cache_object = object->IsJSObject()
        ? Handle<JSObject>::cast(object)
        : Handle<JSObject>(JSObject::cast(object->GetPrototype()));
    // Update the stub cache.
    isolate()->stub_cache()->Set(*name, cache_object->map(), *code);
  }

  if (had_proto_failure) state = MONOMORPHIC_PROTOTYPE_FAILURE;
  TRACE_IC(kind_ == Code::CALL_IC ? "CallIC" : "KeyedCallIC",
           name, state, target());
}


// Miss handler for keyed calls (obj[key]()). Symbol keys are delegated to
// the named-call path; other keys use a megamorphic or arguments-specific
// stub and a generic property load.
MaybeObject* KeyedCallIC::LoadFunction(State state,
                                       Handle<Object> object,
                                       Handle<Object> key) {
  if (key->IsSymbol()) {
    return CallICBase::LoadFunction(state,
                                    Code::kNoExtraICState,
                                    object,
                                    Handle<String>::cast(key));
  }

  if (object->IsUndefined() || object->IsNull()) {
    return TypeError("non_object_property_call", object, key);
  }

  if (FLAG_use_ic && state != MEGAMORPHIC && object->IsHeapObject()) {
    int argc = target()->arguments_count();
    Handle<Map> map =
        isolate()->factory()->non_strict_arguments_elements_map();
    if (object->IsJSObject() &&
        Handle<JSObject>::cast(object)->elements()->map() == *map) {
      // Receiver is a non-strict arguments object: use the dedicated stub.
      Handle<Code> code = isolate()->stub_cache()->ComputeCallArguments(
          argc, Code::KEYED_CALL_IC);
      set_target(*code);
      TRACE_IC("KeyedCallIC", key, state, target());
    } else if (!object->IsAccessCheckNeeded()) {
      Handle<Code> code = isolate()->stub_cache()->ComputeCallMegamorphic(
          argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
      set_target(*code);
      TRACE_IC("KeyedCallIC", key, state, target());
    }
  }

  Handle<Object> result = GetProperty(object, key);
  RETURN_IF_EMPTY_HANDLE(isolate(), result);

  // Make receiver an object if the callee requires it. Strict mode or builtin
  // functions do not wrap the receiver, non-strict functions and objects
  // called as functions do.
  ReceiverToObjectIfRequired(result, object);
  if (result->IsJSFunction()) return *result;

  result = TryCallAsFunction(result);
  if (result->IsJSFunction()) return *result;

  return TypeError("property_not_function", object, key);
}


// Miss handler for named loads: handles the special-cased properties
// (string/array length, function prototype) with dedicated stubs, otherwise
// performs the lookup, updates the caches, and returns the property value.
MaybeObject* LoadIC::Load(State state,
                          Handle<Object> object,
                          Handle<String> name) {
  // If the object is undefined or null it's illegal to try to get any
  // of its properties; throw a TypeError in that case.
  if (object->IsUndefined() || object->IsNull()) {
    return TypeError("non_object_property_load", object, name);
  }

  if (FLAG_use_ic) {
    // Use specialized code for getting the length of strings and
    // string wrapper objects. The length property of string wrapper
    // objects is read-only and therefore always returns the length of
    // the underlying string value. See ECMA-262 15.5.5.1.
    if ((object->IsString() || object->IsStringWrapper()) &&
        name->Equals(isolate()->heap()->length_symbol())) {
      Handle<Code> stub;
      if (state == UNINITIALIZED) {
        stub = pre_monomorphic_stub();
      } else if (state == PREMONOMORPHIC) {
        stub = object->IsString()
            ? isolate()->builtins()->LoadIC_StringLength()
            : isolate()->builtins()->LoadIC_StringWrapperLength();
      } else if (state == MONOMORPHIC && object->IsStringWrapper()) {
        stub = isolate()->builtins()->LoadIC_StringWrapperLength();
      } else if (state != MEGAMORPHIC) {
        stub = megamorphic_stub();
      }
      if (!stub.is_null()) {
        set_target(*stub);
#ifdef DEBUG
        if (FLAG_trace_ic) PrintF("[LoadIC : +#length /string]\n");
#endif
      }
      // Get the string if we have a string wrapper object.
      Handle<Object> string = object->IsJSValue()
          ? Handle<Object>(Handle<JSValue>::cast(object)->value())
          : object;
      return Smi::FromInt(String::cast(*string)->length());
    }

    // Use specialized code for getting the length of arrays.
    if (object->IsJSArray() &&
        name->Equals(isolate()->heap()->length_symbol())) {
      Handle<Code> stub;
      if (state == UNINITIALIZED) {
        stub = pre_monomorphic_stub();
      } else if (state == PREMONOMORPHIC) {
        stub = isolate()->builtins()->LoadIC_ArrayLength();
      } else if (state != MEGAMORPHIC) {
        stub = megamorphic_stub();
      }
      if (!stub.is_null()) {
        set_target(*stub);
#ifdef DEBUG
        if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
#endif
      }
      return JSArray::cast(*object)->length();
    }

    // Use specialized code for getting prototype of functions.
    if (object->IsJSFunction() &&
        name->Equals(isolate()->heap()->prototype_symbol()) &&
        Handle<JSFunction>::cast(object)->should_have_prototype()) {
      Handle<Code> stub;
      if (state == UNINITIALIZED) {
        stub = pre_monomorphic_stub();
      } else if (state == PREMONOMORPHIC) {
        stub = isolate()->builtins()->LoadIC_FunctionPrototype();
      } else if (state != MEGAMORPHIC) {
        stub = megamorphic_stub();
      }
      if (!stub.is_null()) {
        set_target(*stub);
#ifdef DEBUG
        if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
#endif
      }
      return Accessors::FunctionGetPrototype(*object, 0);
    }
  }

  // Check if the name is trivially convertible to an index and get
  // the element if so.
  uint32_t index;
  if (name->AsArrayIndex(&index)) return object->GetElement(index);

  // Named lookup in the object.
  LookupResult lookup(isolate());
  LookupForRead(object, name, &lookup);

  // If we did not find a property, check if we need to throw an exception.
  if (!lookup.IsProperty()) {
    if (IsContextual(object)) {
      return ReferenceError("not_defined", name);
    }
    LOG(isolate(), SuspectReadEvent(*name, *object));
  }

  // Update inline cache and stub cache.
  if (FLAG_use_ic) {
    UpdateCaches(&lookup, state, object, name);
  }

  PropertyAttributes attr;
  if (lookup.IsFound() &&
      (lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) {
    // Get the property.
    Handle<Object> result =
        Object::GetProperty(object, object, &lookup, name, &attr);
    RETURN_IF_EMPTY_HANDLE(isolate(), result);
    // If the property is not present, check if we need to throw an
    // exception.
    if (attr == ABSENT && IsContextual(object)) {
      return ReferenceError("not_defined", name);
    }
    return *result;
  }

  // Get the property.
  return object->GetProperty(*object, &lookup, *name, &attr);
}


// Drives the load IC state machine after a lookup: computes the stub
// matching the property type (field, constant, normal, callback,
// interceptor, or nonexistent) and patches the IC / stub cache.
// NOTE: this function continues beyond this chunk of the file.
void LoadIC::UpdateCaches(LookupResult* lookup,
                          State state,
                          Handle<Object> object,
                          Handle<String> name) {
  // Bail out if the result is not cacheable.
  if (!lookup->IsCacheable()) return;

  // Loading properties from values is not common, so don't try to
  // deal with non-JS objects here.
  if (!object->IsJSObject()) return;
  Handle<JSObject> receiver = Handle<JSObject>::cast(object);

  if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;

  // Compute the code stub for this load.
  Handle<Code> code;
  if (state == UNINITIALIZED) {
    // This is the first time we execute this inline cache.
    // Set the target to the pre monomorphic stub to delay
    // setting the monomorphic state.
    code = pre_monomorphic_stub();
  } else if (!lookup->IsProperty()) {
    // Nonexistent property. The result is undefined.
    code = isolate()->stub_cache()->ComputeLoadNonexistent(name, receiver);
  } else {
    // Compute monomorphic stub.
    Handle<JSObject> holder(lookup->holder());
    switch (lookup->type()) {
      case FIELD:
        code = isolate()->stub_cache()->ComputeLoadField(
            name, receiver, holder, lookup->GetFieldIndex());
        break;
      case CONSTANT_FUNCTION: {
        Handle<JSFunction> constant(lookup->GetConstantFunction());
        code = isolate()->stub_cache()->ComputeLoadConstant(
            name, receiver, holder, constant);
        break;
      }
      case NORMAL:
        if (holder->IsGlobalObject()) {
          Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder);
          Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
          code = isolate()->stub_cache()->ComputeLoadGlobal(
              name, receiver, global, cell, lookup->IsDontDelete());
        } else {
          // There is only one shared stub for loading normalized
          // properties. It does not traverse the prototype chain, so the
          // property must be found in the receiver for the stub to be
          // applicable.
          if (!holder.is_identical_to(receiver)) return;
          code = isolate()->stub_cache()->ComputeLoadNormal();
        }
        break;
      case CALLBACKS: {
        Handle<Object> callback_object(lookup->GetCallbackObject());
        if (!callback_object->IsAccessorInfo()) return;
        Handle<AccessorInfo> callback =
            Handle<AccessorInfo>::cast(callback_object);
        if (v8::ToCData<Address>(callback->getter()) == 0) return;
        code = isolate()->stub_cache()->ComputeLoadCallback(
            name, receiver, holder, callback);
        break;
      }
      case INTERCEPTOR:
        ASSERT(HasInterceptorGetter(*holder));
        code = isolate()->stub_cache()->ComputeLoadInterceptor(
            name, receiver, holder);
        break;
      default:
        return;
    }
  }

  // Patch the call site depending on the state of the cache.
1015 if (state == UNINITIALIZED || 1016 state == PREMONOMORPHIC || 1017 state == MONOMORPHIC_PROTOTYPE_FAILURE) { 1018 set_target(*code); 1019 } else if (state == MONOMORPHIC) { 1020 // We are transitioning from monomorphic to megamorphic case. 1021 // Place the current monomorphic stub and stub compiled for 1022 // the receiver into stub cache. 1023 Map* map = target()->FindFirstMap(); 1024 if (map != NULL) { 1025 isolate()->stub_cache()->Set(*name, map, target()); 1026 } 1027 isolate()->stub_cache()->Set(*name, receiver->map(), *code); 1028 1029 set_target(*megamorphic_stub()); 1030 } else if (state == MEGAMORPHIC) { 1031 // Cache code holding map should be consistent with 1032 // GenerateMonomorphicCacheProbe. 1033 isolate()->stub_cache()->Set(*name, receiver->map(), *code); 1034 } 1035 1036 TRACE_IC("LoadIC", name, state, target()); 1037} 1038 1039 1040Handle<Code> KeyedLoadIC::GetElementStubWithoutMapCheck( 1041 bool is_js_array, 1042 ElementsKind elements_kind, 1043 KeyedAccessGrowMode grow_mode) { 1044 ASSERT(grow_mode == DO_NOT_ALLOW_JSARRAY_GROWTH); 1045 return KeyedLoadElementStub(elements_kind).GetCode(); 1046} 1047 1048 1049Handle<Code> KeyedLoadIC::ComputePolymorphicStub( 1050 MapHandleList* receiver_maps, 1051 StrictModeFlag strict_mode, 1052 KeyedAccessGrowMode growth_mode) { 1053 CodeHandleList handler_ics(receiver_maps->length()); 1054 for (int i = 0; i < receiver_maps->length(); ++i) { 1055 Handle<Map> receiver_map = receiver_maps->at(i); 1056 Handle<Code> cached_stub = ComputeMonomorphicStubWithoutMapCheck( 1057 receiver_map, strict_mode, growth_mode); 1058 handler_ics.Add(cached_stub); 1059 } 1060 KeyedLoadStubCompiler compiler(isolate()); 1061 Handle<Code> code = compiler.CompileLoadPolymorphic( 1062 receiver_maps, &handler_ics); 1063 isolate()->counters()->keyed_load_polymorphic_stubs()->Increment(); 1064 PROFILE(isolate(), 1065 CodeCreateEvent(Logger::KEYED_LOAD_MEGAMORPHIC_IC_TAG, *code, 0)); 1066 return code; 1067} 1068 1069 
// Handles a keyed load (obj[key]).  Symbol keys are routed through the
// named-property machinery (with the same length/prototype fast paths as
// LoadIC); all other keys fall through to element-stub selection at the
// bottom.
MaybeObject* KeyedLoadIC::Load(State state,
                               Handle<Object> object,
                               Handle<Object> key,
                               bool force_generic_stub) {
  // Check for values that can be converted into a symbol.
  // TODO(1295): Remove this code.
  if (key->IsHeapNumber() &&
      isnan(Handle<HeapNumber>::cast(key)->value())) {
    key = isolate()->factory()->nan_symbol();
  } else if (key->IsUndefined()) {
    key = isolate()->factory()->undefined_symbol();
  }

  if (key->IsSymbol()) {
    Handle<String> name = Handle<String>::cast(key);

    // If the object is undefined or null it's illegal to try to get any
    // of its properties; throw a TypeError in that case.
    if (object->IsUndefined() || object->IsNull()) {
      return TypeError("non_object_property_load", object, name);
    }

    if (FLAG_use_ic) {
      // TODO(1073): don't ignore the current stub state.

      // Use specialized code for getting the length of strings.
      if (object->IsString() &&
          name->Equals(isolate()->heap()->length_symbol())) {
        Handle<String> string = Handle<String>::cast(object);
        Handle<Code> code =
            isolate()->stub_cache()->ComputeKeyedLoadStringLength(name, string);
        ASSERT(!code.is_null());
        set_target(*code);
        TRACE_IC("KeyedLoadIC", name, state, target());
        return Smi::FromInt(string->length());
      }

      // Use specialized code for getting the length of arrays.
      if (object->IsJSArray() &&
          name->Equals(isolate()->heap()->length_symbol())) {
        Handle<JSArray> array = Handle<JSArray>::cast(object);
        Handle<Code> code =
            isolate()->stub_cache()->ComputeKeyedLoadArrayLength(name, array);
        ASSERT(!code.is_null());
        set_target(*code);
        TRACE_IC("KeyedLoadIC", name, state, target());
        return array->length();
      }

      // Use specialized code for getting prototype of functions.
      if (object->IsJSFunction() &&
          name->Equals(isolate()->heap()->prototype_symbol()) &&
          Handle<JSFunction>::cast(object)->should_have_prototype()) {
        Handle<JSFunction> function = Handle<JSFunction>::cast(object);
        Handle<Code> code =
            isolate()->stub_cache()->ComputeKeyedLoadFunctionPrototype(
                name, function);
        ASSERT(!code.is_null());
        set_target(*code);
        TRACE_IC("KeyedLoadIC", name, state, target());
        return Accessors::FunctionGetPrototype(*object, 0);
      }
    }

    // Check if the name is trivially convertible to an index and get
    // the element or char if so.
    uint32_t index = 0;
    if (name->AsArrayIndex(&index)) {
      // Rewrite to the generic keyed load stub.
      if (FLAG_use_ic) set_target(*generic_stub());
      return Runtime::GetElementOrCharAt(isolate(), object, index);
    }

    // Named lookup.
    LookupResult lookup(isolate());
    LookupForRead(object, name, &lookup);

    // If we did not find a property, check if we need to throw an exception.
    if (!lookup.IsProperty() && IsContextual(object)) {
      return ReferenceError("not_defined", name);
    }

    if (FLAG_use_ic) {
      UpdateCaches(&lookup, state, object, name);
    }

    PropertyAttributes attr;
    if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
      // Get the property.
      Handle<Object> result =
          Object::GetProperty(object, object, &lookup, name, &attr);
      RETURN_IF_EMPTY_HANDLE(isolate(), result);
      // If the property is not present, check if we need to throw an
      // exception.
      if (attr == ABSENT && IsContextual(object)) {
        return ReferenceError("not_defined", name);
      }
      return *result;
    }

    return object->GetProperty(*object, &lookup, *name, &attr);
  }

  // Do not use ICs for objects that require access checks (including
  // the global object).
  bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();

  if (use_ic) {
    // Pick an element-access stub; default is the generic one.
    Handle<Code> stub = generic_stub();
    if (!force_generic_stub) {
      if (object->IsString() && key->IsNumber()) {
        if (state == UNINITIALIZED) {
          stub = string_stub();
        }
      } else if (object->IsJSObject()) {
        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
        if (receiver->elements()->map() ==
            isolate()->heap()->non_strict_arguments_elements_map()) {
          stub = non_strict_arguments_stub();
        } else if (receiver->HasIndexedInterceptor()) {
          stub = indexed_interceptor_stub();
        } else if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
          stub = ComputeStub(receiver, LOAD, kNonStrictMode, stub);
        }
      }
    } else {
      TRACE_GENERIC_IC("KeyedLoadIC", "force generic");
    }
    if (!stub.is_null()) set_target(*stub);
  }

  TRACE_IC("KeyedLoadIC", key, state, target());

  // Get the property.
  return Runtime::GetObjectProperty(isolate(), object, key);
}


// Installs a monomorphic stub for a keyed load with a symbol key, or
// rewrites the call site towards the megamorphic/generic case.
void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
                               State state,
                               Handle<Object> object,
                               Handle<String> name) {
  // Bail out if we didn't find a result.
  if (!lookup->IsProperty() || !lookup->IsCacheable()) return;

  if (!object->IsJSObject()) return;
  Handle<JSObject> receiver = Handle<JSObject>::cast(object);

  if (HasNormalObjectsInPrototypeChain(isolate(), lookup, *object)) return;

  // Compute the code stub for this load.
  Handle<Code> code;

  if (state == UNINITIALIZED) {
    // This is the first time we execute this inline cache.
    // Set the target to the pre monomorphic stub to delay
    // setting the monomorphic state.
    code = pre_monomorphic_stub();
  } else {
    // Compute a monomorphic stub, specialized on the property kind found.
    Handle<JSObject> holder(lookup->holder());
    switch (lookup->type()) {
      case FIELD:
        code = isolate()->stub_cache()->ComputeKeyedLoadField(
            name, receiver, holder, lookup->GetFieldIndex());
        break;
      case CONSTANT_FUNCTION: {
        Handle<JSFunction> constant(lookup->GetConstantFunction());
        code = isolate()->stub_cache()->ComputeKeyedLoadConstant(
            name, receiver, holder, constant);
        break;
      }
      case CALLBACKS: {
        Handle<Object> callback_object(lookup->GetCallbackObject());
        if (!callback_object->IsAccessorInfo()) return;
        Handle<AccessorInfo> callback =
            Handle<AccessorInfo>::cast(callback_object);
        // Only cache when a native getter is actually present.
        if (v8::ToCData<Address>(callback->getter()) == 0) return;
        code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
            name, receiver, holder, callback);
        break;
      }
      case INTERCEPTOR:
        ASSERT(HasInterceptorGetter(lookup->holder()));
        code = isolate()->stub_cache()->ComputeKeyedLoadInterceptor(
            name, receiver, holder);
        break;
      default:
        // Always rewrite to the generic case so that we do not
        // repeatedly try to rewrite.
        code = generic_stub();
        break;
    }
  }

  // Patch the call site depending on the state of the cache. Make
  // sure to always rewrite from monomorphic to megamorphic.
  ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE);
  if (state == UNINITIALIZED || state == PREMONOMORPHIC) {
    set_target(*code);
  } else if (state == MONOMORPHIC) {
    set_target(*megamorphic_stub());
  }

  TRACE_IC("KeyedLoadIC", name, state, target());
}


// Returns true if a store IC can cache this lookup result at all.
static bool StoreICableLookup(LookupResult* lookup) {
  // Bail out if we didn't find a result.
  if (!lookup->IsFound() || lookup->type() == NULL_DESCRIPTOR) return false;

  // Bail out if inline caching is not allowed.
  if (!lookup->IsCacheable()) return false;

  // If the property is read-only, we leave the IC in its current state.
  if (lookup->IsReadOnly()) return false;

  return true;
}


// Performs the local lookup used before caching a named store.  When the
// receiver has an interceptor without a setter, the lookup is retried
// behind the interceptor so the real property (if any) is cached instead.
static bool LookupForWrite(Handle<JSObject> receiver,
                           Handle<String> name,
                           LookupResult* lookup) {
  receiver->LocalLookup(*name, lookup);
  if (!StoreICableLookup(lookup)) {
    return false;
  }

  if (lookup->type() == INTERCEPTOR &&
      receiver->GetNamedInterceptor()->setter()->IsUndefined()) {
    receiver->LocalLookupRealNamedProperty(*name, lookup);
    return StoreICableLookup(lookup);
  }

  return true;
}


// Handles a named store (obj.name = value): dispatches proxies and
// non-object receivers, the array-length fast path, updates the inline
// cache, and finally performs the actual property write.
MaybeObject* StoreIC::Store(State state,
                            StrictModeFlag strict_mode,
                            Handle<Object> object,
                            Handle<String> name,
                            Handle<Object> value) {
  if (!object->IsJSObject()) {
    // Handle proxies.
    if (object->IsJSProxy()) {
      return JSProxy::cast(*object)->
          SetProperty(*name, *value, NONE, strict_mode);
    }

    // If the object is undefined or null it's illegal to try to set any
    // properties on it; throw a TypeError in that case.
    if (object->IsUndefined() || object->IsNull()) {
      return TypeError("non_object_property_store", object, name);
    }

    // The length property of string values is read-only. Throw in strict mode.
    if (strict_mode == kStrictMode && object->IsString() &&
        name->Equals(isolate()->heap()->length_symbol())) {
      return TypeError("strict_read_only_property", object, name);
    }
    // Ignore other stores where the receiver is not a JSObject.
    // TODO(1475): Must check prototype chains of object wrappers.
    return *value;
  }

  Handle<JSObject> receiver = Handle<JSObject>::cast(object);

  // Check if the given name is an array index.
  uint32_t index;
  if (name->AsArrayIndex(&index)) {
    Handle<Object> result =
        JSObject::SetElement(receiver, index, value, NONE, strict_mode);
    RETURN_IF_EMPTY_HANDLE(isolate(), result);
    return *value;
  }

  // Use specialized code for setting the length of arrays with fast
  // properties. Slow properties might indicate redefinition of the
  // length property.
  if (receiver->IsJSArray() &&
      name->Equals(isolate()->heap()->length_symbol()) &&
      Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
      receiver->HasFastProperties()) {
#ifdef DEBUG
    if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
#endif
    Handle<Code> stub = (strict_mode == kStrictMode)
        ? isolate()->builtins()->StoreIC_ArrayLength_Strict()
        : isolate()->builtins()->StoreIC_ArrayLength();
    set_target(*stub);
    return receiver->SetProperty(*name, *value, NONE, strict_mode);
  }

  // Lookup the property locally in the receiver.
  if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
    LookupResult lookup(isolate());

    if (LookupForWrite(receiver, name, &lookup)) {
      // Generate a stub for this store.
      UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
    } else {
      // Strict mode doesn't allow setting non-existent global property
      // or an assignment to a read only property.
      if (strict_mode == kStrictMode) {
        if (lookup.IsProperty() && lookup.IsReadOnly()) {
          return TypeError("strict_read_only_property", object, name);
        } else if (IsContextual(object)) {
          return ReferenceError("not_defined", name);
        }
      }
    }
  }

  if (receiver->IsJSGlobalProxy()) {
    // TODO(ulan): find out why we patch this site even with --no-use-ic
    // Generate a generic stub that goes to the runtime when we see a global
    // proxy as receiver.
    Handle<Code> stub = (strict_mode == kStrictMode)
        ? global_proxy_stub_strict()
        : global_proxy_stub();
    if (target() != *stub) {
      set_target(*stub);
      TRACE_IC("StoreIC", name, state, target());
    }
  }

  // Set the property.
  return receiver->SetProperty(*name, *value, NONE, strict_mode);
}


// Installs the most specific cacheable stub for a named store and keeps the
// stub cache in sync as the call site transitions towards megamorphic.
void StoreIC::UpdateCaches(LookupResult* lookup,
                           State state,
                           StrictModeFlag strict_mode,
                           Handle<JSObject> receiver,
                           Handle<String> name,
                           Handle<Object> value) {
  ASSERT(!receiver->IsJSGlobalProxy());
  ASSERT(StoreICableLookup(lookup));
  // These are not cacheable, so we never see such LookupResults here.
  ASSERT(lookup->type() != HANDLER);
  // We get only called for properties or transitions, see StoreICableLookup.
  ASSERT(lookup->type() != NULL_DESCRIPTOR);

  // If the property has a non-field type allowing map transitions
  // where there is extra room in the object, we leave the IC in its
  // current state.
  PropertyType type = lookup->type();

  // Compute the code stub for this store; used for rewriting to
  // monomorphic state and making sure that the code stub is in the
  // stub cache.
  Handle<Code> code;
  switch (type) {
    case FIELD:
      code = isolate()->stub_cache()->ComputeStoreField(name,
                                                        receiver,
                                                        lookup->GetFieldIndex(),
                                                        Handle<Map>::null(),
                                                        strict_mode);
      break;
    case MAP_TRANSITION: {
      // Only cache attribute-free transitions.
      if (lookup->GetAttributes() != NONE) return;
      Handle<Map> transition(lookup->GetTransitionMap());
      int index = transition->PropertyIndexFor(*name);
      code = isolate()->stub_cache()->ComputeStoreField(
          name, receiver, index, transition, strict_mode);
      break;
    }
    case NORMAL:
      if (receiver->IsGlobalObject()) {
        // The stub generated for the global object picks the value directly
        // from the property cell. So the property must be directly on the
        // global object.
        Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
        Handle<JSGlobalPropertyCell> cell(global->GetPropertyCell(lookup));
        code = isolate()->stub_cache()->ComputeStoreGlobal(
            name, global, cell, strict_mode);
      } else {
        if (lookup->holder() != *receiver) return;
        code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
      }
      break;
    case CALLBACKS: {
      Handle<Object> callback_object(lookup->GetCallbackObject());
      if (!callback_object->IsAccessorInfo()) return;
      Handle<AccessorInfo> callback =
          Handle<AccessorInfo>::cast(callback_object);
      // Only cache when a native setter is actually present.
      if (v8::ToCData<Address>(callback->setter()) == 0) return;
      code = isolate()->stub_cache()->ComputeStoreCallback(
          name, receiver, callback, strict_mode);
      break;
    }
    case INTERCEPTOR:
      ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
      code = isolate()->stub_cache()->ComputeStoreInterceptor(
          name, receiver, strict_mode);
      break;
    case CONSTANT_FUNCTION:
    case CONSTANT_TRANSITION:
    case ELEMENTS_TRANSITION:
      return;
    case HANDLER:
    case NULL_DESCRIPTOR:
      UNREACHABLE();
      return;
  }

  // Patch the call site depending on the state of the cache.
  if (state == UNINITIALIZED || state == MONOMORPHIC_PROTOTYPE_FAILURE) {
    set_target(*code);
  } else if (state == MONOMORPHIC) {
    // Only move to megamorphic if the target changes.
    if (target() != *code) {
      set_target((strict_mode == kStrictMode)
          ? megamorphic_stub_strict()
          : megamorphic_stub());
    }
  } else if (state == MEGAMORPHIC) {
    // Update the stub cache.
    isolate()->stub_cache()->Set(*name, receiver->map(), *code);
  }

  TRACE_IC("StoreIC", name, state, target());
}


// Adds new_receiver_map to receiver_maps if no identical map is present.
// Returns true when the list was actually extended.
static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps,
                                       Handle<Map> new_receiver_map) {
  ASSERT(!new_receiver_map.is_null());
  for (int current = 0; current < receiver_maps->length(); ++current) {
    if (!receiver_maps->at(current).is_null() &&
        receiver_maps->at(current).is_identical_to(new_receiver_map)) {
      return false;
    }
  }
  receiver_maps->Add(new_receiver_map);
  return true;
}


// Collects the receiver maps the given stub was compiled for: the string
// map for the string stub, the first embedded map for a MONOMORPHIC stub,
// and for a MEGAMORPHIC stub all maps harvested from the stub's
// relocation info (each embedded object is asserted to be a Map).
void KeyedIC::GetReceiverMapsForStub(Handle<Code> stub,
                                     MapHandleList* result) {
  ASSERT(stub->is_inline_cache_stub());
  if (!string_stub().is_null() && stub.is_identical_to(string_stub())) {
    return result->Add(isolate()->factory()->string_map());
  } else if (stub->is_keyed_load_stub() || stub->is_keyed_store_stub()) {
    if (stub->ic_state() == MONOMORPHIC) {
      result->Add(Handle<Map>(stub->FindFirstMap()));
    } else {
      ASSERT(stub->ic_state() == MEGAMORPHIC);
      // Walking relocation info must not allocate (raw pointers below).
      AssertNoAllocation no_allocation;
      int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
      for (RelocIterator it(*stub, mask); !it.done(); it.next()) {
        RelocInfo* info = it.rinfo();
        Handle<Object> object(info->target_object());
        ASSERT(object->IsMap());
        AddOneReceiverMapIfMissing(result, Handle<Map>::cast(object));
      }
    }
  }
}


// Chooses the element-access stub for this call site: monomorphic where
// possible, otherwise polymorphic (cached), otherwise the generic stub.
Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
                                  StubKind stub_kind,
                                  StrictModeFlag strict_mode,
                                  Handle<Code> generic_stub) {
  State ic_state = target()->ic_state();
  KeyedAccessGrowMode grow_mode = IsGrowStubKind(stub_kind)
      ?
        ALLOW_JSARRAY_GROWTH
      : DO_NOT_ALLOW_JSARRAY_GROWTH;

  // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
  // via megamorphic stubs, since they don't have a map in their relocation info
  // and so the stubs can't be harvested for the object needed for a map check.
  if (target()->type() != NORMAL) {
    TRACE_GENERIC_IC("KeyedIC", "non-NORMAL target type");
    return generic_stub;
  }

  bool monomorphic = false;
  MapHandleList target_receiver_maps;
  if (ic_state != UNINITIALIZED && ic_state != PREMONOMORPHIC) {
    GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps);
  }
  if (!IsTransitionStubKind(stub_kind)) {
    if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
      monomorphic = true;
    } else {
      if (ic_state == MONOMORPHIC) {
        // The first time a receiver is seen that is a transitioned version of
        // the previous monomorphic receiver type, assume the new ElementsKind
        // is the monomorphic type. This benefits global arrays that only
        // transition once, and all call sites accessing them are faster if they
        // remain monomorphic. If this optimistic assumption is not true, the IC
        // will miss again and it will become polymorphic and support both the
        // untransitioned and transitioned maps.
        monomorphic = IsMoreGeneralElementsKindTransition(
            target_receiver_maps.at(0)->elements_kind(),
            receiver->GetElementsKind());
      }
    }
  }

  if (monomorphic) {
    return ComputeMonomorphicStub(
        receiver, stub_kind, strict_mode, generic_stub);
  }
  ASSERT(target() != *generic_stub);

  // Determine the list of receiver maps that this call site has seen,
  // adding the map that was just encountered.
  Handle<Map> receiver_map(receiver->map());
  bool map_added =
      AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
  if (IsTransitionStubKind(stub_kind)) {
    Handle<Map> new_map = ComputeTransitionedMap(receiver, stub_kind);
    map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps, new_map);
  }
  if (!map_added) {
    // If the miss wasn't due to an unseen map, a polymorphic stub
    // won't help, use the generic stub.
    TRACE_GENERIC_IC("KeyedIC", "same map added twice");
    return generic_stub;
  }

  // If the maximum number of receiver maps has been exceeded, use the generic
  // version of the IC.
  if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
    TRACE_GENERIC_IC("KeyedIC", "max polymorph exceeded");
    return generic_stub;
  }

  // Once a call site has allowed growth, keep allowing it.
  if ((Code::GetKeyedAccessGrowMode(target()->extra_ic_state()) ==
       ALLOW_JSARRAY_GROWTH)) {
    grow_mode = ALLOW_JSARRAY_GROWTH;
  }

  // Probe the polymorphic code cache before compiling a new stub.
  Handle<PolymorphicCodeCache> cache =
      isolate()->factory()->polymorphic_code_cache();
  Code::ExtraICState extra_state = Code::ComputeExtraICState(grow_mode,
                                                             strict_mode);
  Code::Flags flags = Code::ComputeFlags(kind(), MEGAMORPHIC, extra_state);
  Handle<Object> probe = cache->Lookup(&target_receiver_maps, flags);
  if (probe->IsCode()) return Handle<Code>::cast(probe);

  Handle<Code> stub =
      ComputePolymorphicStub(&target_receiver_maps, strict_mode, grow_mode);
  PolymorphicCodeCache::Update(cache, &target_receiver_maps, flags, stub);
  return stub;
}


// Picks the map-check-free handler for a single receiver map: the string
// stub for string receivers, otherwise the element stub matching the
// map's elements kind.
Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
    Handle<Map> receiver_map,
    StrictModeFlag strict_mode,
    KeyedAccessGrowMode grow_mode) {
  if ((receiver_map->instance_type() & kNotStringTag) == 0) {
    ASSERT(!string_stub().is_null());
    return string_stub();
  } else {
    ASSERT(receiver_map->has_dictionary_elements() ||
           receiver_map->has_fast_elements() ||
           receiver_map->has_fast_smi_only_elements() ||
           receiver_map->has_fast_double_elements() ||
           receiver_map->has_external_array_elements());
    bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
    return GetElementStubWithoutMapCheck(is_js_array,
                                         receiver_map->elements_kind(),
                                         grow_mode);
  }
}


// Returns a monomorphic element stub for supported elements kinds, or the
// supplied generic stub otherwise.
Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<JSObject> receiver,
                                             StubKind stub_kind,
                                             StrictModeFlag strict_mode,
                                             Handle<Code> generic_stub) {
  if (receiver->HasFastElements() ||
      receiver->HasFastSmiOnlyElements() ||
      receiver->HasExternalArrayElements() ||
      receiver->HasFastDoubleElements() ||
      receiver->HasDictionaryElements()) {
    return isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
        receiver, stub_kind, strict_mode);
  } else {
    return generic_stub;
  }
}


// Maps a transition stub kind to the receiver map after the elements-kind
// transition it performs.
Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
                                            StubKind stub_kind) {
  switch (stub_kind) {
    case KeyedIC::STORE_TRANSITION_SMI_TO_OBJECT:
    case KeyedIC::STORE_TRANSITION_DOUBLE_TO_OBJECT:
    case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT:
    case KeyedIC::STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT:
      return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
      break;
    case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE:
    case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE:
      return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
      break;
    default:
      UNREACHABLE();
      return Handle<Map>::null();
  }
}


// Returns the element-store stub for the given elements kind and grow mode.
Handle<Code> KeyedStoreIC::GetElementStubWithoutMapCheck(
    bool is_js_array,
    ElementsKind elements_kind,
    KeyedAccessGrowMode grow_mode) {
  return KeyedStoreElementStub(is_js_array, elements_kind, grow_mode).GetCode();
}


// Compiles one handler stub per receiver map (transition-and-store stubs
// for maps that have a transitioned target in the list) and wraps them in
// a single polymorphic store dispatcher.
Handle<Code> KeyedStoreIC::ComputePolymorphicStub(
    MapHandleList* receiver_maps,
    StrictModeFlag strict_mode,
    KeyedAccessGrowMode grow_mode) {
  // Collect MONOMORPHIC stubs for all target_receiver_maps.
  CodeHandleList handler_ics(receiver_maps->length());
  MapHandleList transitioned_maps(receiver_maps->length());
  for (int i = 0; i < receiver_maps->length(); ++i) {
    Handle<Map> receiver_map(receiver_maps->at(i));
    Handle<Code> cached_stub;
    Handle<Map> transitioned_map =
        receiver_map->FindTransitionedMap(receiver_maps);
    if (!transitioned_map.is_null()) {
      cached_stub = ElementsTransitionAndStoreStub(
          receiver_map->elements_kind(),  // original elements_kind
          transitioned_map->elements_kind(),
          receiver_map->instance_type() == JS_ARRAY_TYPE,  // is_js_array
          strict_mode, grow_mode).GetCode();
    } else {
      cached_stub = ComputeMonomorphicStubWithoutMapCheck(receiver_map,
                                                          strict_mode,
                                                          grow_mode);
    }
    ASSERT(!cached_stub.is_null());
    handler_ics.Add(cached_stub);
    transitioned_maps.Add(transitioned_map);
  }
  KeyedStoreStubCompiler compiler(isolate(), strict_mode, grow_mode);
  Handle<Code> code = compiler.CompileStorePolymorphic(
      receiver_maps, &handler_ics, &transitioned_maps);
  isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
  PROFILE(isolate(),
          CodeCreateEvent(Logger::KEYED_STORE_MEGAMORPHIC_IC_TAG, *code, 0));
  return code;
}


// Classifies a keyed store by whether it grows the array (index at or past
// the current JSArray length) and which elements-kind transition, if any,
// the stored value forces.
KeyedIC::StubKind KeyedStoreIC::GetStubKind(Handle<JSObject> receiver,
                                            Handle<Object> key,
                                            Handle<Object> value) {
  ASSERT(key->IsSmi());
  int index = Smi::cast(*key)->value();
  bool allow_growth = receiver->IsJSArray() &&
      JSArray::cast(*receiver)->length()->IsSmi() &&
      index >= Smi::cast(JSArray::cast(*receiver)->length())->value();

  if (allow_growth) {
    // Handle growing array in stub if necessary.
if (receiver->HasFastSmiOnlyElements()) {
      // A heap number forces a SMI->DOUBLE elements transition; any other
      // heap object forces SMI->OBJECT.
      if (value->IsHeapNumber()) {
        return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
      }
      if (value->IsHeapObject()) {
        return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
      }
    } else if (receiver->HasFastDoubleElements()) {
      // Only values that are neither Smis nor heap numbers force a
      // transition out of a double backing store.
      if (!value->IsSmi() && !value->IsHeapNumber()) {
        return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
      }
    }
    return STORE_AND_GROW_NO_TRANSITION;
  } else {
    // Handle only in-bounds elements accesses.
    if (receiver->HasFastSmiOnlyElements()) {
      if (value->IsHeapNumber()) {
        return STORE_TRANSITION_SMI_TO_DOUBLE;
      } else if (value->IsHeapObject()) {
        return STORE_TRANSITION_SMI_TO_OBJECT;
      }
    } else if (receiver->HasFastDoubleElements()) {
      if (!value->IsSmi() && !value->IsHeapNumber()) {
        return STORE_TRANSITION_DOUBLE_TO_OBJECT;
      }
    }
    return STORE_NO_TRANSITION;
  }
}


// Handles a keyed store: stores |value| at |object|[|key|], updating the
// inline cache / stub cache on the way when ICs are enabled.  Symbol keys
// are routed through the named-property path; all other keys fall through
// to the generic keyed path at the bottom.
MaybeObject* KeyedStoreIC::Store(State state,
                                 StrictModeFlag strict_mode,
                                 Handle<Object> object,
                                 Handle<Object> key,
                                 Handle<Object> value,
                                 bool force_generic) {
  if (key->IsSymbol()) {
    Handle<String> name = Handle<String>::cast(key);

    // Handle proxies.
    if (object->IsJSProxy()) {
      return JSProxy::cast(*object)->SetProperty(
          *name, *value, NONE, strict_mode);
    }

    // If the object is undefined or null it's illegal to try to set any
    // properties on it; throw a TypeError in that case.
    if (object->IsUndefined() || object->IsNull()) {
      return TypeError("non_object_property_store", object, name);
    }

    // Ignore stores where the receiver is not a JSObject.
    if (!object->IsJSObject()) return *value;
    Handle<JSObject> receiver = Handle<JSObject>::cast(object);

    // Check if the given name is an array index.
    uint32_t index;
    if (name->AsArrayIndex(&index)) {
      Handle<Object> result =
          JSObject::SetElement(receiver, index, value, NONE, strict_mode);
      RETURN_IF_EMPTY_HANDLE(isolate(), result);
      return *value;
    }

    // Update inline cache and stub cache.
    if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
      LookupResult lookup(isolate());
      if (LookupForWrite(receiver, name, &lookup)) {
        UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
      }
    }

    // Set the property.
    return receiver->SetProperty(*name, *value, NONE, strict_mode);
  }

  // Do not use ICs for objects that require access checks (including
  // the global object).
  bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded();
  ASSERT(!(use_ic && object->IsJSGlobalProxy()));

  if (use_ic) {
    Handle<Code> stub = (strict_mode == kStrictMode)
        ? generic_stub_strict()
        : generic_stub();
    if (object->IsJSObject()) {
      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
      if (receiver->elements()->map() ==
          isolate()->heap()->non_strict_arguments_elements_map()) {
        // Mapped-arguments objects get a dedicated stub.
        stub = non_strict_arguments_stub();
      } else if (!force_generic) {
        // Only specialize for Smi keys, and never while the
        // non-strict-arguments stub is installed.
        if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
          StubKind stub_kind = GetStubKind(receiver, key, value);
          stub = ComputeStub(receiver, stub_kind, strict_mode, stub);
        }
      } else {
        TRACE_GENERIC_IC("KeyedStoreIC", "force generic");
      }
    }
    if (!stub.is_null()) set_target(*stub);
  }

  TRACE_IC("KeyedStoreIC", key, state, target());

  // Set the property.
  return Runtime::SetObjectProperty(
      isolate(), object, key, value, NONE, strict_mode);
}


// Rewrites the keyed store IC to a monomorphic (field store) or generic
// stub, based on the write lookup performed by the caller.
void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
                                State state,
                                StrictModeFlag strict_mode,
                                Handle<JSObject> receiver,
                                Handle<String> name,
                                Handle<Object> value) {
  ASSERT(!receiver->IsJSGlobalProxy());
  ASSERT(StoreICableLookup(lookup));
  // These are not cacheable, so we never see such LookupResults here.
  ASSERT(lookup->type() != HANDLER);
  // We get only called for properties or transitions, see StoreICableLookup.
  ASSERT(lookup->type() != NULL_DESCRIPTOR);

  // If the property has a non-field type allowing map transitions
  // where there is extra room in the object, we leave the IC in its
  // current state.
  PropertyType type = lookup->type();

  // Compute the code stub for this store; used for rewriting to
  // monomorphic state and making sure that the code stub is in the
  // stub cache.
  Handle<Code> code;

  switch (type) {
    case FIELD:
      code = isolate()->stub_cache()->ComputeKeyedStoreField(
          name, receiver, lookup->GetFieldIndex(),
          Handle<Map>::null(), strict_mode);
      break;
    case MAP_TRANSITION:
      if (lookup->GetAttributes() == NONE) {
        Handle<Map> transition(lookup->GetTransitionMap());
        int index = transition->PropertyIndexFor(*name);
        code = isolate()->stub_cache()->ComputeKeyedStoreField(
            name, receiver, index, transition, strict_mode);
        break;
      }
      // fall through.
    case NORMAL:
    case CONSTANT_FUNCTION:
    case CALLBACKS:
    case INTERCEPTOR:
    case CONSTANT_TRANSITION:
    case ELEMENTS_TRANSITION:
      // Always rewrite to the generic case so that we do not
      // repeatedly try to rewrite.
      code = (strict_mode == kStrictMode)
          ?
generic_stub_strict()
          : generic_stub();
      break;
    case HANDLER:
    case NULL_DESCRIPTOR:
      UNREACHABLE();
      return;
  }

  ASSERT(!code.is_null());

  // Patch the call site depending on the state of the cache.  Make
  // sure to always rewrite from monomorphic to megamorphic.
  ASSERT(state != MONOMORPHIC_PROTOTYPE_FAILURE);
  if (state == UNINITIALIZED || state == PREMONOMORPHIC) {
    set_target(*code);
  } else if (state == MONOMORPHIC) {
    set_target((strict_mode == kStrictMode)
               ? *megamorphic_stub_strict()
               : *megamorphic_stub());
  }

  TRACE_IC("KeyedStoreIC", name, state, target());
}


#undef TRACE_IC


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

// Used from ic-<arch>.cc.
// Resolves the callee for a named call site and updates the call IC.
RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  CallIC ic(isolate);
  IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
  Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
  MaybeObject* maybe_result = ic.LoadFunction(state,
                                              extra_ic_state,
                                              args.at<Object>(0),
                                              args.at<String>(1));
  // Result could be a function or a failure.
  JSFunction* raw_function = NULL;
  if (!maybe_result->To(&raw_function)) return maybe_result;

  // The first time the inline cache is updated may be the first time the
  // function it references gets called.  If the function is lazily compiled
  // then the first call will trigger a compilation.  We check for this case
  // and we do the compilation immediately, instead of waiting for the stub
  // currently attached to the JSFunction object to trigger compilation.
  if (raw_function->is_compiled()) return raw_function;

  Handle<JSFunction> function(raw_function);
  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
  return *function;
}


// Used from ic-<arch>.cc.
// Same as CallIC_Miss, but for keyed (computed-name) call sites.
RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  KeyedCallIC ic(isolate);
  IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
  MaybeObject* maybe_result =
      ic.LoadFunction(state, args.at<Object>(0), args.at<Object>(1));
  // Result could be a function or a failure.
  JSFunction* raw_function = NULL;
  if (!maybe_result->To(&raw_function)) return maybe_result;

  // Compile the callee eagerly on first use; see CallIC_Miss.
  if (raw_function->is_compiled()) return raw_function;

  Handle<JSFunction> function(raw_function);
  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
  return *function;
}


// Used from ic-<arch>.cc.
RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  LoadIC ic(isolate);
  IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
  return ic.Load(state, args.at<Object>(0), args.at<String>(1));
}


// Used from ic-<arch>.cc
RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  KeyedLoadIC ic(isolate);
  IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
  return ic.Load(state, args.at<Object>(0), args.at<Object>(1), false);
}


// Like KeyedLoadIC_Miss, but forces the IC into the generic stub
// (final bool argument to Load is |force_generic| = true).
RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissForceGeneric) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  KeyedLoadIC ic(isolate);
  IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
  return ic.Load(state, args.at<Object>(0), args.at<Object>(1), true);
}


// Used from ic-<arch>.cc.
2010RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) { 2011 HandleScope scope; 2012 ASSERT(args.length() == 3); 2013 StoreIC ic(isolate); 2014 IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); 2015 Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); 2016 return ic.Store(state, 2017 Code::GetStrictMode(extra_ic_state), 2018 args.at<Object>(0), 2019 args.at<String>(1), 2020 args.at<Object>(2)); 2021} 2022 2023 2024RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) { 2025 NoHandleAllocation nha; 2026 2027 ASSERT(args.length() == 2); 2028 JSArray* receiver = JSArray::cast(args[0]); 2029 Object* len = args[1]; 2030 2031 // The generated code should filter out non-Smis before we get here. 2032 ASSERT(len->IsSmi()); 2033 2034#ifdef DEBUG 2035 // The length property has to be a writable callback property. 2036 LookupResult debug_lookup(isolate); 2037 receiver->LocalLookup(isolate->heap()->length_symbol(), &debug_lookup); 2038 ASSERT(debug_lookup.type() == CALLBACKS && !debug_lookup.IsReadOnly()); 2039#endif 2040 2041 Object* result; 2042 { MaybeObject* maybe_result = receiver->SetElementsLength(len); 2043 if (!maybe_result->ToObject(&result)) return maybe_result; 2044 } 2045 return len; 2046} 2047 2048 2049// Extend storage is called in a store inline cache when 2050// it is necessary to extend the properties array of a 2051// JSObject. 2052RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) { 2053 NoHandleAllocation na; 2054 ASSERT(args.length() == 3); 2055 2056 // Convert the parameters 2057 JSObject* object = JSObject::cast(args[0]); 2058 Map* transition = Map::cast(args[1]); 2059 Object* value = args[2]; 2060 2061 // Check the object has run out out property space. 2062 ASSERT(object->HasFastProperties()); 2063 ASSERT(object->map()->unused_property_fields() == 0); 2064 2065 // Expand the properties array. 
2066 FixedArray* old_storage = object->properties(); 2067 int new_unused = transition->unused_property_fields(); 2068 int new_size = old_storage->length() + new_unused + 1; 2069 Object* result; 2070 { MaybeObject* maybe_result = old_storage->CopySize(new_size); 2071 if (!maybe_result->ToObject(&result)) return maybe_result; 2072 } 2073 FixedArray* new_storage = FixedArray::cast(result); 2074 new_storage->set(old_storage->length(), value); 2075 2076 // Set the new property value and do the map transition. 2077 object->set_properties(new_storage); 2078 object->set_map(transition); 2079 2080 // Return the stored value. 2081 return value; 2082} 2083 2084 2085// Used from ic-<arch>.cc. 2086RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) { 2087 HandleScope scope(isolate); 2088 ASSERT(args.length() == 3); 2089 KeyedStoreIC ic(isolate); 2090 IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); 2091 Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); 2092 return ic.Store(state, 2093 Code::GetStrictMode(extra_ic_state), 2094 args.at<Object>(0), 2095 args.at<Object>(1), 2096 args.at<Object>(2), 2097 false); 2098} 2099 2100 2101RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { 2102 NoHandleAllocation na; 2103 ASSERT(args.length() == 3); 2104 KeyedStoreIC ic(isolate); 2105 Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); 2106 Handle<Object> object = args.at<Object>(0); 2107 Handle<Object> key = args.at<Object>(1); 2108 Handle<Object> value = args.at<Object>(2); 2109 StrictModeFlag strict_mode = Code::GetStrictMode(extra_ic_state); 2110 return Runtime::SetObjectProperty(isolate, 2111 object, 2112 key, 2113 value, 2114 NONE, 2115 strict_mode); 2116} 2117 2118 2119RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) { 2120 HandleScope scope(isolate); 2121 ASSERT(args.length() == 3); 2122 KeyedStoreIC ic(isolate); 2123 IC::State state = IC::StateFrom(ic.target(), args[0], args[1]); 2124 Code::ExtraICState extra_ic_state = 
ic.target()->extra_ic_state();
  return ic.Store(state,
                  Code::GetStrictMode(extra_ic_state),
                  args.at<Object>(0),
                  args.at<Object>(1),
                  args.at<Object>(2),
                  true);
}


void UnaryOpIC::patch(Code* code) {
  set_target(code);
}


// Returns a human-readable name for a unary-op type info, for tracing.
const char* UnaryOpIC::GetName(TypeInfo type_info) {
  switch (type_info) {
    case UNINITIALIZED: return "Uninitialized";
    case SMI: return "Smi";
    case HEAP_NUMBER: return "HeapNumbers";
    case GENERIC: return "Generic";
    default: return "Invalid";
  }
}


// Maps a unary-op type info onto the common IC state enum.
UnaryOpIC::State UnaryOpIC::ToState(TypeInfo type_info) {
  switch (type_info) {
    case UNINITIALIZED:
      return ::v8::internal::UNINITIALIZED;
    case SMI:
    case HEAP_NUMBER:
      return MONOMORPHIC;
    case GENERIC:
      return MEGAMORPHIC;
  }
  UNREACHABLE();
  return ::v8::internal::UNINITIALIZED;
}

// Classifies a concrete operand value as SMI, HEAP_NUMBER or GENERIC.
UnaryOpIC::TypeInfo UnaryOpIC::GetTypeInfo(Handle<Object> operand) {
  ::v8::internal::TypeInfo operand_type =
      ::v8::internal::TypeInfo::TypeFromValue(operand);
  if (operand_type.IsSmi()) {
    return SMI;
  } else if (operand_type.IsNumber()) {
    return HEAP_NUMBER;
  } else {
    return GENERIC;
  }
}


// Computes the next unary-op type from the freshly observed operand type
// and the previously recorded one.  Transitions only move "up"
// (SMI -> HEAP_NUMBER -> GENERIC); patching never happens in GENERIC.
UnaryOpIC::TypeInfo UnaryOpIC::ComputeNewType(
    UnaryOpIC::TypeInfo current_type,
    UnaryOpIC::TypeInfo previous_type) {
  switch (previous_type) {
    case UnaryOpIC::UNINITIALIZED:
      return current_type;
    case UnaryOpIC::SMI:
      return (current_type == UnaryOpIC::GENERIC)
          ? UnaryOpIC::GENERIC
          : UnaryOpIC::HEAP_NUMBER;
    case UnaryOpIC::HEAP_NUMBER:
      return UnaryOpIC::GENERIC;
    case UnaryOpIC::GENERIC:
      // We should never do patching if we are in GENERIC state.
      UNREACHABLE();
      return UnaryOpIC::GENERIC;
  }
  UNREACHABLE();
  return UnaryOpIC::GENERIC;
}


void BinaryOpIC::patch(Code* code) {
  set_target(code);
}


// Returns a human-readable name for a binary-op type info, for tracing.
const char* BinaryOpIC::GetName(TypeInfo type_info) {
  switch (type_info) {
    case UNINITIALIZED: return "Uninitialized";
    case SMI: return "SMI";
    case INT32: return "Int32s";
    case HEAP_NUMBER: return "HeapNumbers";
    case ODDBALL: return "Oddball";
    case BOTH_STRING: return "BothStrings";
    case STRING: return "Strings";
    case GENERIC: return "Generic";
    default: return "Invalid";
  }
}


// Maps a binary-op type info onto the common IC state enum.
BinaryOpIC::State BinaryOpIC::ToState(TypeInfo type_info) {
  switch (type_info) {
    case UNINITIALIZED:
      return ::v8::internal::UNINITIALIZED;
    case SMI:
    case INT32:
    case HEAP_NUMBER:
    case ODDBALL:
    case BOTH_STRING:
    case STRING:
      return MONOMORPHIC;
    case GENERIC:
      return MEGAMORPHIC;
  }
  UNREACHABLE();
  return ::v8::internal::UNINITIALIZED;
}


// Joins two observed type infos.  String/BothString combinations are
// handled specially; otherwise the larger enum value subsumes the smaller.
BinaryOpIC::TypeInfo BinaryOpIC::JoinTypes(BinaryOpIC::TypeInfo x,
                                           BinaryOpIC::TypeInfo y) {
  if (x == UNINITIALIZED) return y;
  if (y == UNINITIALIZED) return x;
  if (x == y) return x;
  if (x == BOTH_STRING && y == STRING) return STRING;
  if (x == STRING && y == BOTH_STRING) return STRING;
  if (x == STRING || x == BOTH_STRING || y == STRING || y == BOTH_STRING) {
    return GENERIC;
  }
  if (x > y) return x;
  return y;
}


// Classifies a concrete (left, right) operand pair.
BinaryOpIC::TypeInfo BinaryOpIC::GetTypeInfo(Handle<Object> left,
                                             Handle<Object> right) {
  ::v8::internal::TypeInfo left_type =
      ::v8::internal::TypeInfo::TypeFromValue(left);
  ::v8::internal::TypeInfo right_type =
      ::v8::internal::TypeInfo::TypeFromValue(right);

  if (left_type.IsSmi() && right_type.IsSmi()) {
    return SMI;
  }

  if (left_type.IsInteger32() && right_type.IsInteger32()) {
    // Platforms with 32-bit Smis have no distinct INT32 type.
    if (kSmiValueSize == 32) return SMI;
    return INT32;
  }

  if (left_type.IsNumber() && right_type.IsNumber()) {
    return HEAP_NUMBER;
  }

  // Patching for fast string ADD makes sense even if only one of the
  // arguments is a string.
  if (left_type.IsString()) {
    return right_type.IsString() ? BOTH_STRING : STRING;
  } else if (right_type.IsString()) {
    return STRING;
  }

  // Check for oddball objects.
  if (left->IsUndefined() && right->IsNumber()) return ODDBALL;
  if (left->IsNumber() && right->IsUndefined()) return ODDBALL;

  return GENERIC;
}


// Patches the unary-op IC stub to the newly observed type, then computes
// the actual result by calling the corresponding JS builtin.
RUNTIME_FUNCTION(MaybeObject*, UnaryOp_Patch) {
  ASSERT(args.length() == 4);

  HandleScope scope(isolate);
  Handle<Object> operand = args.at<Object>(0);
  Token::Value op = static_cast<Token::Value>(args.smi_at(1));
  UnaryOverwriteMode mode = static_cast<UnaryOverwriteMode>(args.smi_at(2));
  UnaryOpIC::TypeInfo previous_type =
      static_cast<UnaryOpIC::TypeInfo>(args.smi_at(3));

  UnaryOpIC::TypeInfo type = UnaryOpIC::GetTypeInfo(operand);
  type = UnaryOpIC::ComputeNewType(type, previous_type);

  UnaryOpStub stub(op, mode, type);
  Handle<Code> code = stub.GetCode();
  if (!code.is_null()) {
    if (FLAG_trace_ic) {
      PrintF("[UnaryOpIC (%s->%s)#%s]\n",
             UnaryOpIC::GetName(previous_type),
             UnaryOpIC::GetName(type),
             Token::Name(op));
    }
    UnaryOpIC ic(isolate);
    ic.patch(*code);
  }

  Handle<JSBuiltinsObject> builtins = Handle<JSBuiltinsObject>(
      isolate->thread_local_top()->context_->builtins(), isolate);
  Object* builtin = NULL;  // Initialization calms down the compiler.
switch (op) {
    case Token::SUB:
      builtin = builtins->javascript_builtin(Builtins::UNARY_MINUS);
      break;
    case Token::BIT_NOT:
      builtin = builtins->javascript_builtin(Builtins::BIT_NOT);
      break;
    default:
      UNREACHABLE();
  }

  Handle<JSFunction> builtin_function(JSFunction::cast(builtin), isolate);

  bool caught_exception;
  Handle<Object> result = Execution::Call(builtin_function, operand, 0, NULL,
                                          &caught_exception);
  if (caught_exception) {
    return Failure::Exception();
  }
  return *result;
}

// Patches the binary-op IC stub to the joined operand types, then computes
// the actual result by calling the corresponding JS builtin with |left| as
// the receiver and |right| as the sole argument.
RUNTIME_FUNCTION(MaybeObject*, BinaryOp_Patch) {
  ASSERT(args.length() == 5);

  HandleScope scope(isolate);
  Handle<Object> left = args.at<Object>(0);
  Handle<Object> right = args.at<Object>(1);
  int key = args.smi_at(2);
  Token::Value op = static_cast<Token::Value>(args.smi_at(3));
  BinaryOpIC::TypeInfo previous_type =
      static_cast<BinaryOpIC::TypeInfo>(args.smi_at(4));

  BinaryOpIC::TypeInfo type = BinaryOpIC::GetTypeInfo(left, right);
  type = BinaryOpIC::JoinTypes(type, previous_type);
  BinaryOpIC::TypeInfo result_type = BinaryOpIC::UNINITIALIZED;
  // Only ADD has a fast string path; any other op on strings goes generic.
  if ((type == BinaryOpIC::STRING || type == BinaryOpIC::BOTH_STRING) &&
      op != Token::ADD) {
    type = BinaryOpIC::GENERIC;
  }
  if (type == BinaryOpIC::SMI && previous_type == BinaryOpIC::SMI) {
    if (op == Token::DIV ||
        op == Token::MUL ||
        op == Token::SHR ||
        kSmiValueSize == 32) {
      // Arithmetic on two Smi inputs has yielded a heap number.
      // That is the only way to get here from the Smi stub.
      // With 32-bit Smis, all overflows give heap numbers, but with
      // 31-bit Smis, most operations overflow to int32 results.
      result_type = BinaryOpIC::HEAP_NUMBER;
    } else {
      // Other operations on SMIs that overflow yield int32s.
      result_type = BinaryOpIC::INT32;
    }
  }
  if (type == BinaryOpIC::INT32 && previous_type == BinaryOpIC::INT32) {
    // We must be here because an operation on two INT32 types overflowed.
    result_type = BinaryOpIC::HEAP_NUMBER;
  }

  BinaryOpStub stub(key, type, result_type);
  Handle<Code> code = stub.GetCode();
  if (!code.is_null()) {
    if (FLAG_trace_ic) {
      PrintF("[BinaryOpIC (%s->(%s->%s))#%s]\n",
             BinaryOpIC::GetName(previous_type),
             BinaryOpIC::GetName(type),
             BinaryOpIC::GetName(result_type),
             Token::Name(op));
    }
    BinaryOpIC ic(isolate);
    ic.patch(*code);

    // Activate inlined smi code.
    if (previous_type == BinaryOpIC::UNINITIALIZED) {
      PatchInlinedSmiCode(ic.address());
    }
  }

  Handle<JSBuiltinsObject> builtins = Handle<JSBuiltinsObject>(
      isolate->thread_local_top()->context_->builtins(), isolate);
  Object* builtin = NULL;  // Initialization calms down the compiler.
  switch (op) {
    case Token::ADD:
      builtin = builtins->javascript_builtin(Builtins::ADD);
      break;
    case Token::SUB:
      builtin = builtins->javascript_builtin(Builtins::SUB);
      break;
    case Token::MUL:
      builtin = builtins->javascript_builtin(Builtins::MUL);
      break;
    case Token::DIV:
      builtin = builtins->javascript_builtin(Builtins::DIV);
      break;
    case Token::MOD:
      builtin = builtins->javascript_builtin(Builtins::MOD);
      break;
    case Token::BIT_AND:
      builtin = builtins->javascript_builtin(Builtins::BIT_AND);
      break;
    case Token::BIT_OR:
      builtin = builtins->javascript_builtin(Builtins::BIT_OR);
      break;
    case Token::BIT_XOR:
      builtin = builtins->javascript_builtin(Builtins::BIT_XOR);
      break;
    case Token::SHR:
      builtin = builtins->javascript_builtin(Builtins::SHR);
      break;
    case Token::SAR:
      builtin = builtins->javascript_builtin(Builtins::SAR);
      break;
    case Token::SHL:
      builtin = builtins->javascript_builtin(Builtins::SHL);
      break;
    default:
      UNREACHABLE();
  }

  Handle<JSFunction> builtin_function(JSFunction::cast(builtin), isolate);

  bool caught_exception;
  Handle<Object> builtin_args[] = { right };
  Handle<Object> result = Execution::Call(builtin_function,
                                          left,
                                          ARRAY_SIZE(builtin_args),
                                          builtin_args,
                                          &caught_exception);
  if (caught_exception) {
    return Failure::Exception();
  }
  return *result;
}


Handle<Code> CompareIC::GetUninitialized(Token::Value op) {
  ICCompareStub stub(op, UNINITIALIZED);
  return stub.GetCode();
}


// Derives the compare IC state from the installed target code object.
CompareIC::State CompareIC::ComputeState(Code* target) {
  int key = target->major_key();
  if (key == CodeStub::Compare) return GENERIC;
  ASSERT(key == CodeStub::CompareIC);
  return static_cast<State>(target->compare_state());
}


// Returns a human-readable name for a compare IC state, for tracing.
const char* CompareIC::GetStateName(State
state) { 2470 switch (state) { 2471 case UNINITIALIZED: return "UNINITIALIZED"; 2472 case SMIS: return "SMIS"; 2473 case HEAP_NUMBERS: return "HEAP_NUMBERS"; 2474 case OBJECTS: return "OBJECTS"; 2475 case KNOWN_OBJECTS: return "OBJECTS"; 2476 case SYMBOLS: return "SYMBOLS"; 2477 case STRINGS: return "STRINGS"; 2478 case GENERIC: return "GENERIC"; 2479 default: 2480 UNREACHABLE(); 2481 return NULL; 2482 } 2483} 2484 2485 2486CompareIC::State CompareIC::TargetState(State state, 2487 bool has_inlined_smi_code, 2488 Handle<Object> x, 2489 Handle<Object> y) { 2490 switch (state) { 2491 case UNINITIALIZED: 2492 if (x->IsSmi() && y->IsSmi()) return SMIS; 2493 if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS; 2494 if (Token::IsOrderedRelationalCompareOp(op_)) { 2495 // Ordered comparisons treat undefined as NaN, so the 2496 // HEAP_NUMBER stub will do the right thing. 2497 if ((x->IsNumber() && y->IsUndefined()) || 2498 (y->IsNumber() && x->IsUndefined())) { 2499 return HEAP_NUMBERS; 2500 } 2501 } 2502 if (x->IsSymbol() && y->IsSymbol()) { 2503 // We compare symbols as strings if we need to determine 2504 // the order in a non-equality compare. 2505 return Token::IsEqualityOp(op_) ? SYMBOLS : STRINGS; 2506 } 2507 if (x->IsString() && y->IsString()) return STRINGS; 2508 if (!Token::IsEqualityOp(op_)) return GENERIC; 2509 if (x->IsJSObject() && y->IsJSObject()) { 2510 if (Handle<JSObject>::cast(x)->map() == 2511 Handle<JSObject>::cast(y)->map() && 2512 Token::IsEqualityOp(op_)) { 2513 return KNOWN_OBJECTS; 2514 } else { 2515 return OBJECTS; 2516 } 2517 } 2518 return GENERIC; 2519 case SMIS: 2520 return has_inlined_smi_code && x->IsNumber() && y->IsNumber() 2521 ? HEAP_NUMBERS 2522 : GENERIC; 2523 case SYMBOLS: 2524 ASSERT(Token::IsEqualityOp(op_)); 2525 return x->IsString() && y->IsString() ? 
STRINGS : GENERIC; 2526 case HEAP_NUMBERS: 2527 case STRINGS: 2528 case OBJECTS: 2529 case KNOWN_OBJECTS: 2530 case GENERIC: 2531 return GENERIC; 2532 } 2533 UNREACHABLE(); 2534 return GENERIC; 2535} 2536 2537 2538// Used from ic_<arch>.cc. 2539RUNTIME_FUNCTION(Code*, CompareIC_Miss) { 2540 NoHandleAllocation na; 2541 ASSERT(args.length() == 3); 2542 CompareIC ic(isolate, static_cast<Token::Value>(args.smi_at(2))); 2543 ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1)); 2544 return ic.target(); 2545} 2546 2547 2548RUNTIME_FUNCTION(MaybeObject*, ToBoolean_Patch) { 2549 ASSERT(args.length() == 3); 2550 2551 HandleScope scope(isolate); 2552 Handle<Object> object = args.at<Object>(0); 2553 Register tos = Register::from_code(args.smi_at(1)); 2554 ToBooleanStub::Types old_types(args.smi_at(2)); 2555 2556 ToBooleanStub::Types new_types(old_types); 2557 bool to_boolean_value = new_types.Record(object); 2558 old_types.TraceTransition(new_types); 2559 2560 ToBooleanStub stub(tos, new_types); 2561 Handle<Code> code = stub.GetCode(); 2562 ToBooleanIC ic(isolate); 2563 ic.patch(*code); 2564 return Smi::FromInt(to_boolean_value ? 1 : 0); 2565} 2566 2567 2568void ToBooleanIC::patch(Code* code) { 2569 set_target(code); 2570} 2571 2572 2573static const Address IC_utilities[] = { 2574#define ADDR(name) FUNCTION_ADDR(name), 2575 IC_UTIL_LIST(ADDR) 2576 NULL 2577#undef ADDR 2578}; 2579 2580 2581Address IC::AddressFromUtilityId(IC::UtilityId id) { 2582 return IC_utilities[id]; 2583} 2584 2585 2586} } // namespace v8::internal 2587