stub-cache.cc revision 3bec4d28b1f388dbc06a9c4276e1a03e86c52b04
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "api.h"
#include "arguments.h"
#include "ic-inl.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------
// StubCache implementation.
40 41 42StubCache::Entry StubCache::primary_[StubCache::kPrimaryTableSize]; 43StubCache::Entry StubCache::secondary_[StubCache::kSecondaryTableSize]; 44 45void StubCache::Initialize(bool create_heap_objects) { 46 ASSERT(IsPowerOf2(kPrimaryTableSize)); 47 ASSERT(IsPowerOf2(kSecondaryTableSize)); 48 if (create_heap_objects) { 49 HandleScope scope; 50 Clear(); 51 } 52} 53 54 55Code* StubCache::Set(String* name, Map* map, Code* code) { 56 // Get the flags from the code. 57 Code::Flags flags = Code::RemoveTypeFromFlags(code->flags()); 58 59 // Validate that the name does not move on scavenge, and that we 60 // can use identity checks instead of string equality checks. 61 ASSERT(!Heap::InNewSpace(name)); 62 ASSERT(name->IsSymbol()); 63 64 // The state bits are not important to the hash function because 65 // the stub cache only contains monomorphic stubs. Make sure that 66 // the bits are the least significant so they will be the ones 67 // masked out. 68 ASSERT(Code::ExtractICStateFromFlags(flags) == MONOMORPHIC); 69 ASSERT(Code::kFlagsICStateShift == 0); 70 71 // Make sure that the code type is not included in the hash. 72 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 73 74 // Compute the primary entry. 75 int primary_offset = PrimaryOffset(name, flags, map); 76 Entry* primary = entry(primary_, primary_offset); 77 Code* hit = primary->value; 78 79 // If the primary entry has useful data in it, we retire it to the 80 // secondary cache before overwriting it. 81 if (hit != Builtins::builtin(Builtins::Illegal)) { 82 Code::Flags primary_flags = Code::RemoveTypeFromFlags(hit->flags()); 83 int secondary_offset = 84 SecondaryOffset(primary->key, primary_flags, primary_offset); 85 Entry* secondary = entry(secondary_, secondary_offset); 86 *secondary = *primary; 87 } 88 89 // Update primary cache. 
90 primary->key = name; 91 primary->value = code; 92 return code; 93} 94 95 96Object* StubCache::ComputeLoadNonexistent(String* name, JSObject* receiver) { 97 ASSERT(receiver->IsGlobalObject() || receiver->HasFastProperties()); 98 // If no global objects are present in the prototype chain, the load 99 // nonexistent IC stub can be shared for all names for a given map 100 // and we use the empty string for the map cache in that case. If 101 // there are global objects involved, we need to check global 102 // property cells in the stub and therefore the stub will be 103 // specific to the name. 104 String* cache_name = Heap::empty_string(); 105 if (receiver->IsGlobalObject()) cache_name = name; 106 JSObject* last = receiver; 107 while (last->GetPrototype() != Heap::null_value()) { 108 last = JSObject::cast(last->GetPrototype()); 109 if (last->IsGlobalObject()) cache_name = name; 110 } 111 // Compile the stub that is either shared for all names or 112 // name specific if there are global objects involved. 
113 Code::Flags flags = 114 Code::ComputeMonomorphicFlags(Code::LOAD_IC, NONEXISTENT); 115 Object* code = receiver->map()->FindInCodeCache(cache_name, flags); 116 if (code->IsUndefined()) { 117 LoadStubCompiler compiler; 118 code = compiler.CompileLoadNonexistent(cache_name, receiver, last); 119 if (code->IsFailure()) return code; 120 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name)); 121 Object* result = 122 receiver->map()->UpdateCodeCache(cache_name, Code::cast(code)); 123 if (result->IsFailure()) return result; 124 } 125 return code; 126} 127 128 129Object* StubCache::ComputeLoadField(String* name, 130 JSObject* receiver, 131 JSObject* holder, 132 int field_index) { 133 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 134 Map* map = receiver->map(); 135 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, FIELD); 136 Object* code = map->FindInCodeCache(name, flags); 137 if (code->IsUndefined()) { 138 LoadStubCompiler compiler; 139 code = compiler.CompileLoadField(receiver, holder, field_index, name); 140 if (code->IsFailure()) return code; 141 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); 142 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 143 if (result->IsFailure()) return result; 144 } 145 return code; 146} 147 148 149Object* StubCache::ComputeLoadCallback(String* name, 150 JSObject* receiver, 151 JSObject* holder, 152 AccessorInfo* callback) { 153 ASSERT(v8::ToCData<Address>(callback->getter()) != 0); 154 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 155 Map* map = receiver->map(); 156 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS); 157 Object* code = map->FindInCodeCache(name, flags); 158 if (code->IsUndefined()) { 159 LoadStubCompiler compiler; 160 code = compiler.CompileLoadCallback(name, receiver, holder, callback); 161 if (code->IsFailure()) return code; 162 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, 
Code::cast(code), name)); 163 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 164 if (result->IsFailure()) return result; 165 } 166 return code; 167} 168 169 170Object* StubCache::ComputeLoadConstant(String* name, 171 JSObject* receiver, 172 JSObject* holder, 173 Object* value) { 174 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 175 Map* map = receiver->map(); 176 Code::Flags flags = 177 Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION); 178 Object* code = map->FindInCodeCache(name, flags); 179 if (code->IsUndefined()) { 180 LoadStubCompiler compiler; 181 code = compiler.CompileLoadConstant(receiver, holder, value, name); 182 if (code->IsFailure()) return code; 183 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); 184 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 185 if (result->IsFailure()) return result; 186 } 187 return code; 188} 189 190 191Object* StubCache::ComputeLoadInterceptor(String* name, 192 JSObject* receiver, 193 JSObject* holder) { 194 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 195 Map* map = receiver->map(); 196 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, INTERCEPTOR); 197 Object* code = map->FindInCodeCache(name, flags); 198 if (code->IsUndefined()) { 199 LoadStubCompiler compiler; 200 code = compiler.CompileLoadInterceptor(receiver, holder, name); 201 if (code->IsFailure()) return code; 202 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); 203 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 204 if (result->IsFailure()) return result; 205 } 206 return code; 207} 208 209 210Object* StubCache::ComputeLoadNormal() { 211 return Builtins::builtin(Builtins::LoadIC_Normal); 212} 213 214 215Object* StubCache::ComputeLoadGlobal(String* name, 216 JSObject* receiver, 217 GlobalObject* holder, 218 JSGlobalPropertyCell* cell, 219 bool is_dont_delete) { 220 ASSERT(IC::GetCodeCacheForObject(receiver, 
holder) == OWN_MAP); 221 Map* map = receiver->map(); 222 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL); 223 Object* code = map->FindInCodeCache(name, flags); 224 if (code->IsUndefined()) { 225 LoadStubCompiler compiler; 226 code = compiler.CompileLoadGlobal(receiver, 227 holder, 228 cell, 229 name, 230 is_dont_delete); 231 if (code->IsFailure()) return code; 232 PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); 233 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 234 if (result->IsFailure()) return result; 235 } 236 return code; 237} 238 239 240Object* StubCache::ComputeKeyedLoadField(String* name, 241 JSObject* receiver, 242 JSObject* holder, 243 int field_index) { 244 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 245 Map* map = receiver->map(); 246 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, FIELD); 247 Object* code = map->FindInCodeCache(name, flags); 248 if (code->IsUndefined()) { 249 KeyedLoadStubCompiler compiler; 250 code = compiler.CompileLoadField(name, receiver, holder, field_index); 251 if (code->IsFailure()) return code; 252 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 253 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 254 if (result->IsFailure()) return result; 255 } 256 return code; 257} 258 259 260Object* StubCache::ComputeKeyedLoadConstant(String* name, 261 JSObject* receiver, 262 JSObject* holder, 263 Object* value) { 264 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 265 Map* map = receiver->map(); 266 Code::Flags flags = 267 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION); 268 Object* code = map->FindInCodeCache(name, flags); 269 if (code->IsUndefined()) { 270 KeyedLoadStubCompiler compiler; 271 code = compiler.CompileLoadConstant(name, receiver, holder, value); 272 if (code->IsFailure()) return code; 273 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, 
Code::cast(code), name)); 274 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 275 if (result->IsFailure()) return result; 276 } 277 return code; 278} 279 280 281Object* StubCache::ComputeKeyedLoadInterceptor(String* name, 282 JSObject* receiver, 283 JSObject* holder) { 284 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 285 Map* map = receiver->map(); 286 Code::Flags flags = 287 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, INTERCEPTOR); 288 Object* code = map->FindInCodeCache(name, flags); 289 if (code->IsUndefined()) { 290 KeyedLoadStubCompiler compiler; 291 code = compiler.CompileLoadInterceptor(receiver, holder, name); 292 if (code->IsFailure()) return code; 293 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 294 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 295 if (result->IsFailure()) return result; 296 } 297 return code; 298} 299 300 301Object* StubCache::ComputeKeyedLoadCallback(String* name, 302 JSObject* receiver, 303 JSObject* holder, 304 AccessorInfo* callback) { 305 ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP); 306 Map* map = receiver->map(); 307 Code::Flags flags = 308 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); 309 Object* code = map->FindInCodeCache(name, flags); 310 if (code->IsUndefined()) { 311 KeyedLoadStubCompiler compiler; 312 code = compiler.CompileLoadCallback(name, receiver, holder, callback); 313 if (code->IsFailure()) return code; 314 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 315 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 316 if (result->IsFailure()) return result; 317 } 318 return code; 319} 320 321 322 323Object* StubCache::ComputeKeyedLoadArrayLength(String* name, 324 JSArray* receiver) { 325 Code::Flags flags = 326 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); 327 ASSERT(receiver->IsJSObject()); 328 Map* map = receiver->map(); 329 Object* code = 
map->FindInCodeCache(name, flags); 330 if (code->IsUndefined()) { 331 KeyedLoadStubCompiler compiler; 332 code = compiler.CompileLoadArrayLength(name); 333 if (code->IsFailure()) return code; 334 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 335 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 336 if (result->IsFailure()) return result; 337 } 338 return code; 339} 340 341 342Object* StubCache::ComputeKeyedLoadStringLength(String* name, 343 String* receiver) { 344 Code::Flags flags = 345 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); 346 Map* map = receiver->map(); 347 Object* code = map->FindInCodeCache(name, flags); 348 if (code->IsUndefined()) { 349 KeyedLoadStubCompiler compiler; 350 code = compiler.CompileLoadStringLength(name); 351 if (code->IsFailure()) return code; 352 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 353 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 354 if (result->IsFailure()) return result; 355 } 356 return code; 357} 358 359 360Object* StubCache::ComputeKeyedLoadFunctionPrototype(String* name, 361 JSFunction* receiver) { 362 Code::Flags flags = 363 Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS); 364 Map* map = receiver->map(); 365 Object* code = map->FindInCodeCache(name, flags); 366 if (code->IsUndefined()) { 367 KeyedLoadStubCompiler compiler; 368 code = compiler.CompileLoadFunctionPrototype(name); 369 if (code->IsFailure()) return code; 370 PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name)); 371 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 372 if (result->IsFailure()) return result; 373 } 374 return code; 375} 376 377 378Object* StubCache::ComputeStoreField(String* name, 379 JSObject* receiver, 380 int field_index, 381 Map* transition) { 382 PropertyType type = (transition == NULL) ? 
FIELD : MAP_TRANSITION; 383 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type); 384 Object* code = receiver->map()->FindInCodeCache(name, flags); 385 if (code->IsUndefined()) { 386 StoreStubCompiler compiler; 387 code = compiler.CompileStoreField(receiver, field_index, transition, name); 388 if (code->IsFailure()) return code; 389 PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); 390 Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code)); 391 if (result->IsFailure()) return result; 392 } 393 return code; 394} 395 396 397Object* StubCache::ComputeStoreNormal() { 398 return Builtins::builtin(Builtins::StoreIC_Normal); 399} 400 401 402Object* StubCache::ComputeStoreGlobal(String* name, 403 GlobalObject* receiver, 404 JSGlobalPropertyCell* cell) { 405 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL); 406 Object* code = receiver->map()->FindInCodeCache(name, flags); 407 if (code->IsUndefined()) { 408 StoreStubCompiler compiler; 409 code = compiler.CompileStoreGlobal(receiver, cell, name); 410 if (code->IsFailure()) return code; 411 PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); 412 Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code)); 413 if (result->IsFailure()) return result; 414 } 415 return code; 416} 417 418 419Object* StubCache::ComputeStoreCallback(String* name, 420 JSObject* receiver, 421 AccessorInfo* callback) { 422 ASSERT(v8::ToCData<Address>(callback->setter()) != 0); 423 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, CALLBACKS); 424 Object* code = receiver->map()->FindInCodeCache(name, flags); 425 if (code->IsUndefined()) { 426 StoreStubCompiler compiler; 427 code = compiler.CompileStoreCallback(receiver, callback, name); 428 if (code->IsFailure()) return code; 429 PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); 430 Object* result = receiver->map()->UpdateCodeCache(name, 
Code::cast(code)); 431 if (result->IsFailure()) return result; 432 } 433 return code; 434} 435 436 437Object* StubCache::ComputeStoreInterceptor(String* name, 438 JSObject* receiver) { 439 Code::Flags flags = 440 Code::ComputeMonomorphicFlags(Code::STORE_IC, INTERCEPTOR); 441 Object* code = receiver->map()->FindInCodeCache(name, flags); 442 if (code->IsUndefined()) { 443 StoreStubCompiler compiler; 444 code = compiler.CompileStoreInterceptor(receiver, name); 445 if (code->IsFailure()) return code; 446 PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name)); 447 Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code)); 448 if (result->IsFailure()) return result; 449 } 450 return code; 451} 452 453 454Object* StubCache::ComputeKeyedStoreField(String* name, JSObject* receiver, 455 int field_index, Map* transition) { 456 PropertyType type = (transition == NULL) ? FIELD : MAP_TRANSITION; 457 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, type); 458 Object* code = receiver->map()->FindInCodeCache(name, flags); 459 if (code->IsUndefined()) { 460 KeyedStoreStubCompiler compiler; 461 code = compiler.CompileStoreField(receiver, field_index, transition, name); 462 if (code->IsFailure()) return code; 463 PROFILE(CodeCreateEvent( 464 Logger::KEYED_STORE_IC_TAG, Code::cast(code), name)); 465 Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code)); 466 if (result->IsFailure()) return result; 467 } 468 return code; 469} 470 471#define CALL_LOGGER_TAG(kind, type) \ 472 (kind == Code::CALL_IC ? Logger::type : Logger::KEYED_##type) 473 474Object* StubCache::ComputeCallConstant(int argc, 475 InLoopFlag in_loop, 476 Code::Kind kind, 477 String* name, 478 Object* object, 479 JSObject* holder, 480 JSFunction* function) { 481 // Compute the check type and the map. 
482 InlineCacheHolderFlag cache_holder = 483 IC::GetCodeCacheForObject(object, holder); 484 Map* map = IC::GetCodeCacheMap(object, cache_holder); 485 486 // Compute check type based on receiver/holder. 487 StubCompiler::CheckType check = StubCompiler::RECEIVER_MAP_CHECK; 488 if (object->IsString()) { 489 check = StubCompiler::STRING_CHECK; 490 } else if (object->IsNumber()) { 491 check = StubCompiler::NUMBER_CHECK; 492 } else if (object->IsBoolean()) { 493 check = StubCompiler::BOOLEAN_CHECK; 494 } 495 496 Code::Flags flags = 497 Code::ComputeMonomorphicFlags(kind, 498 CONSTANT_FUNCTION, 499 cache_holder, 500 in_loop, 501 argc); 502 Object* code = map->FindInCodeCache(name, flags); 503 if (code->IsUndefined()) { 504 // If the function hasn't been compiled yet, we cannot do it now 505 // because it may cause GC. To avoid this issue, we return an 506 // internal error which will make sure we do not update any 507 // caches. 508 if (!function->is_compiled()) return Failure::InternalError(); 509 // Compile the stub - only create stubs for fully compiled functions. 510 CallStubCompiler compiler(argc, in_loop, kind, cache_holder); 511 code = compiler.CompileCallConstant(object, holder, function, name, check); 512 if (code->IsFailure()) return code; 513 ASSERT_EQ(flags, Code::cast(code)->flags()); 514 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), 515 Code::cast(code), name)); 516 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 517 if (result->IsFailure()) return result; 518 } 519 return code; 520} 521 522 523Object* StubCache::ComputeCallField(int argc, 524 InLoopFlag in_loop, 525 Code::Kind kind, 526 String* name, 527 Object* object, 528 JSObject* holder, 529 int index) { 530 // Compute the check type and the map. 
531 InlineCacheHolderFlag cache_holder = 532 IC::GetCodeCacheForObject(object, holder); 533 Map* map = IC::GetCodeCacheMap(object, cache_holder); 534 535 // TODO(1233596): We cannot do receiver map check for non-JS objects 536 // because they may be represented as immediates without a 537 // map. Instead, we check against the map in the holder. 538 if (object->IsNumber() || object->IsBoolean() || object->IsString()) { 539 object = holder; 540 } 541 542 Code::Flags flags = Code::ComputeMonomorphicFlags(kind, 543 FIELD, 544 cache_holder, 545 in_loop, 546 argc); 547 Object* code = map->FindInCodeCache(name, flags); 548 if (code->IsUndefined()) { 549 CallStubCompiler compiler(argc, in_loop, kind, cache_holder); 550 code = compiler.CompileCallField(JSObject::cast(object), 551 holder, 552 index, 553 name); 554 if (code->IsFailure()) return code; 555 ASSERT_EQ(flags, Code::cast(code)->flags()); 556 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), 557 Code::cast(code), name)); 558 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 559 if (result->IsFailure()) return result; 560 } 561 return code; 562} 563 564 565Object* StubCache::ComputeCallInterceptor(int argc, 566 Code::Kind kind, 567 String* name, 568 Object* object, 569 JSObject* holder) { 570 // Compute the check type and the map. 571 InlineCacheHolderFlag cache_holder = 572 IC::GetCodeCacheForObject(object, holder); 573 Map* map = IC::GetCodeCacheMap(object, cache_holder); 574 575 // TODO(1233596): We cannot do receiver map check for non-JS objects 576 // because they may be represented as immediates without a 577 // map. Instead, we check against the map in the holder. 
578 if (object->IsNumber() || object->IsBoolean() || object->IsString()) { 579 object = holder; 580 } 581 582 Code::Flags flags = 583 Code::ComputeMonomorphicFlags(kind, 584 INTERCEPTOR, 585 cache_holder, 586 NOT_IN_LOOP, 587 argc); 588 Object* code = map->FindInCodeCache(name, flags); 589 if (code->IsUndefined()) { 590 CallStubCompiler compiler(argc, NOT_IN_LOOP, kind, cache_holder); 591 code = compiler.CompileCallInterceptor(JSObject::cast(object), 592 holder, 593 name); 594 if (code->IsFailure()) return code; 595 ASSERT_EQ(flags, Code::cast(code)->flags()); 596 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), 597 Code::cast(code), name)); 598 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 599 if (result->IsFailure()) return result; 600 } 601 return code; 602} 603 604 605Object* StubCache::ComputeCallNormal(int argc, 606 InLoopFlag in_loop, 607 Code::Kind kind, 608 String* name, 609 JSObject* receiver) { 610 Object* code = ComputeCallNormal(argc, in_loop, kind); 611 if (code->IsFailure()) return code; 612 return code; 613} 614 615 616Object* StubCache::ComputeCallGlobal(int argc, 617 InLoopFlag in_loop, 618 Code::Kind kind, 619 String* name, 620 JSObject* receiver, 621 GlobalObject* holder, 622 JSGlobalPropertyCell* cell, 623 JSFunction* function) { 624 InlineCacheHolderFlag cache_holder = 625 IC::GetCodeCacheForObject(receiver, holder); 626 Map* map = IC::GetCodeCacheMap(receiver, cache_holder); 627 Code::Flags flags = 628 Code::ComputeMonomorphicFlags(kind, 629 NORMAL, 630 cache_holder, 631 in_loop, 632 argc); 633 Object* code = map->FindInCodeCache(name, flags); 634 if (code->IsUndefined()) { 635 // If the function hasn't been compiled yet, we cannot do it now 636 // because it may cause GC. To avoid this issue, we return an 637 // internal error which will make sure we do not update any 638 // caches. 
639 if (!function->is_compiled()) return Failure::InternalError(); 640 CallStubCompiler compiler(argc, in_loop, kind, cache_holder); 641 code = compiler.CompileCallGlobal(receiver, holder, cell, function, name); 642 if (code->IsFailure()) return code; 643 ASSERT_EQ(flags, Code::cast(code)->flags()); 644 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG), 645 Code::cast(code), name)); 646 Object* result = map->UpdateCodeCache(name, Code::cast(code)); 647 if (result->IsFailure()) return result; 648 } 649 return code; 650} 651 652 653static Object* GetProbeValue(Code::Flags flags) { 654 // Use raw_unchecked... so we don't get assert failures during GC. 655 NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache(); 656 int entry = dictionary->FindEntry(flags); 657 if (entry != -1) return dictionary->ValueAt(entry); 658 return Heap::raw_unchecked_undefined_value(); 659} 660 661 662static Object* ProbeCache(Code::Flags flags) { 663 Object* probe = GetProbeValue(flags); 664 if (probe != Heap::undefined_value()) return probe; 665 // Seed the cache with an undefined value to make sure that any 666 // generated code object can always be inserted into the cache 667 // without causing allocation failures. 668 Object* result = 669 Heap::non_monomorphic_cache()->AtNumberPut(flags, 670 Heap::undefined_value()); 671 if (result->IsFailure()) return result; 672 Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result)); 673 return probe; 674} 675 676 677static Object* FillCache(Object* code) { 678 if (code->IsCode()) { 679 int entry = 680 Heap::non_monomorphic_cache()->FindEntry( 681 Code::cast(code)->flags()); 682 // The entry must be present see comment in ProbeCache. 
683 ASSERT(entry != -1); 684 ASSERT(Heap::non_monomorphic_cache()->ValueAt(entry) == 685 Heap::undefined_value()); 686 Heap::non_monomorphic_cache()->ValueAtPut(entry, code); 687 CHECK(GetProbeValue(Code::cast(code)->flags()) == code); 688 } 689 return code; 690} 691 692 693Code* StubCache::FindCallInitialize(int argc, 694 InLoopFlag in_loop, 695 Code::Kind kind) { 696 Code::Flags flags = 697 Code::ComputeFlags(kind, in_loop, UNINITIALIZED, NORMAL, argc); 698 Object* result = ProbeCache(flags); 699 ASSERT(!result->IsUndefined()); 700 // This might be called during the marking phase of the collector 701 // hence the unchecked cast. 702 return reinterpret_cast<Code*>(result); 703} 704 705 706Object* StubCache::ComputeCallInitialize(int argc, 707 InLoopFlag in_loop, 708 Code::Kind kind) { 709 Code::Flags flags = 710 Code::ComputeFlags(kind, in_loop, UNINITIALIZED, NORMAL, argc); 711 Object* probe = ProbeCache(flags); 712 if (!probe->IsUndefined()) return probe; 713 StubCompiler compiler; 714 return FillCache(compiler.CompileCallInitialize(flags)); 715} 716 717 718Object* StubCache::ComputeCallPreMonomorphic(int argc, 719 InLoopFlag in_loop, 720 Code::Kind kind) { 721 Code::Flags flags = 722 Code::ComputeFlags(kind, in_loop, PREMONOMORPHIC, NORMAL, argc); 723 Object* probe = ProbeCache(flags); 724 if (!probe->IsUndefined()) return probe; 725 StubCompiler compiler; 726 return FillCache(compiler.CompileCallPreMonomorphic(flags)); 727} 728 729 730Object* StubCache::ComputeCallNormal(int argc, 731 InLoopFlag in_loop, 732 Code::Kind kind) { 733 Code::Flags flags = 734 Code::ComputeFlags(kind, in_loop, MONOMORPHIC, NORMAL, argc); 735 Object* probe = ProbeCache(flags); 736 if (!probe->IsUndefined()) return probe; 737 StubCompiler compiler; 738 return FillCache(compiler.CompileCallNormal(flags)); 739} 740 741 742Object* StubCache::ComputeCallMegamorphic(int argc, 743 InLoopFlag in_loop, 744 Code::Kind kind) { 745 Code::Flags flags = 746 Code::ComputeFlags(kind, in_loop, 
MEGAMORPHIC, NORMAL, argc); 747 Object* probe = ProbeCache(flags); 748 if (!probe->IsUndefined()) return probe; 749 StubCompiler compiler; 750 return FillCache(compiler.CompileCallMegamorphic(flags)); 751} 752 753 754Object* StubCache::ComputeCallMiss(int argc, Code::Kind kind) { 755 // MONOMORPHIC_PROTOTYPE_FAILURE state is used to make sure that miss stubs 756 // and monomorphic stubs are not mixed up together in the stub cache. 757 Code::Flags flags = Code::ComputeFlags( 758 kind, NOT_IN_LOOP, MONOMORPHIC_PROTOTYPE_FAILURE, NORMAL, argc); 759 Object* probe = ProbeCache(flags); 760 if (!probe->IsUndefined()) return probe; 761 StubCompiler compiler; 762 return FillCache(compiler.CompileCallMiss(flags)); 763} 764 765 766#ifdef ENABLE_DEBUGGER_SUPPORT 767Object* StubCache::ComputeCallDebugBreak(int argc, Code::Kind kind) { 768 Code::Flags flags = 769 Code::ComputeFlags(kind, NOT_IN_LOOP, DEBUG_BREAK, NORMAL, argc); 770 Object* probe = ProbeCache(flags); 771 if (!probe->IsUndefined()) return probe; 772 StubCompiler compiler; 773 return FillCache(compiler.CompileCallDebugBreak(flags)); 774} 775 776 777Object* StubCache::ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) { 778 Code::Flags flags = 779 Code::ComputeFlags(kind, 780 NOT_IN_LOOP, 781 DEBUG_PREPARE_STEP_IN, 782 NORMAL, 783 argc); 784 Object* probe = ProbeCache(flags); 785 if (!probe->IsUndefined()) return probe; 786 StubCompiler compiler; 787 return FillCache(compiler.CompileCallDebugPrepareStepIn(flags)); 788} 789#endif 790 791 792Object* StubCache::ComputeLazyCompile(int argc) { 793 Code::Flags flags = 794 Code::ComputeFlags(Code::STUB, NOT_IN_LOOP, UNINITIALIZED, NORMAL, argc); 795 Object* probe = ProbeCache(flags); 796 if (!probe->IsUndefined()) return probe; 797 StubCompiler compiler; 798 Object* result = FillCache(compiler.CompileLazyCompile(flags)); 799 if (result->IsCode()) { 800 Code* code = Code::cast(result); 801 USE(code); 802 PROFILE(CodeCreateEvent(Logger::LAZY_COMPILE_TAG, 803 code, 
code->arguments_count())); 804 } 805 return result; 806} 807 808 809void StubCache::Clear() { 810 for (int i = 0; i < kPrimaryTableSize; i++) { 811 primary_[i].key = Heap::empty_string(); 812 primary_[i].value = Builtins::builtin(Builtins::Illegal); 813 } 814 for (int j = 0; j < kSecondaryTableSize; j++) { 815 secondary_[j].key = Heap::empty_string(); 816 secondary_[j].value = Builtins::builtin(Builtins::Illegal); 817 } 818} 819 820 821// ------------------------------------------------------------------------ 822// StubCompiler implementation. 823 824 825// Support function for computing call IC miss stubs. 826Handle<Code> ComputeCallMiss(int argc, Code::Kind kind) { 827 CALL_HEAP_FUNCTION(StubCache::ComputeCallMiss(argc, kind), Code); 828} 829 830 831 832Object* LoadCallbackProperty(Arguments args) { 833 ASSERT(args[0]->IsJSObject()); 834 ASSERT(args[1]->IsJSObject()); 835 AccessorInfo* callback = AccessorInfo::cast(args[2]); 836 Address getter_address = v8::ToCData<Address>(callback->getter()); 837 v8::AccessorGetter fun = FUNCTION_CAST<v8::AccessorGetter>(getter_address); 838 ASSERT(fun != NULL); 839 CustomArguments custom_args(callback->data(), 840 JSObject::cast(args[0]), 841 JSObject::cast(args[1])); 842 v8::AccessorInfo info(custom_args.end()); 843 HandleScope scope; 844 v8::Handle<v8::Value> result; 845 { 846 // Leaving JavaScript. 
847 VMState state(EXTERNAL); 848#ifdef ENABLE_LOGGING_AND_PROFILING 849 state.set_external_callback(getter_address); 850#endif 851 result = fun(v8::Utils::ToLocal(args.at<String>(4)), info); 852 } 853 RETURN_IF_SCHEDULED_EXCEPTION(); 854 if (result.IsEmpty()) return Heap::undefined_value(); 855 return *v8::Utils::OpenHandle(*result); 856} 857 858 859Object* StoreCallbackProperty(Arguments args) { 860 JSObject* recv = JSObject::cast(args[0]); 861 AccessorInfo* callback = AccessorInfo::cast(args[1]); 862 Address setter_address = v8::ToCData<Address>(callback->setter()); 863 v8::AccessorSetter fun = FUNCTION_CAST<v8::AccessorSetter>(setter_address); 864 ASSERT(fun != NULL); 865 Handle<String> name = args.at<String>(2); 866 Handle<Object> value = args.at<Object>(3); 867 HandleScope scope; 868 LOG(ApiNamedPropertyAccess("store", recv, *name)); 869 CustomArguments custom_args(callback->data(), recv, recv); 870 v8::AccessorInfo info(custom_args.end()); 871 { 872 // Leaving JavaScript. 873 VMState state(EXTERNAL); 874#ifdef ENABLE_LOGGING_AND_PROFILING 875 state.set_external_callback(setter_address); 876#endif 877 fun(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), info); 878 } 879 RETURN_IF_SCHEDULED_EXCEPTION(); 880 return *value; 881} 882 883 884static const int kAccessorInfoOffsetInInterceptorArgs = 2; 885 886 887/** 888 * Attempts to load a property with an interceptor (which must be present), 889 * but doesn't search the prototype chain. 890 * 891 * Returns |Heap::no_interceptor_result_sentinel()| if interceptor doesn't 892 * provide any value for the given name. 893 */ 894Object* LoadPropertyWithInterceptorOnly(Arguments args) { 895 Handle<String> name_handle = args.at<String>(0); 896 Handle<InterceptorInfo> interceptor_info = args.at<InterceptorInfo>(1); 897 ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2); 898 ASSERT(args[2]->IsJSObject()); // Receiver. 899 ASSERT(args[3]->IsJSObject()); // Holder. 900 ASSERT(args.length() == 5); // Last arg is data object. 
901 902 Address getter_address = v8::ToCData<Address>(interceptor_info->getter()); 903 v8::NamedPropertyGetter getter = 904 FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address); 905 ASSERT(getter != NULL); 906 907 { 908 // Use the interceptor getter. 909 v8::AccessorInfo info(args.arguments() - 910 kAccessorInfoOffsetInInterceptorArgs); 911 HandleScope scope; 912 v8::Handle<v8::Value> r; 913 { 914 // Leaving JavaScript. 915 VMState state(EXTERNAL); 916 r = getter(v8::Utils::ToLocal(name_handle), info); 917 } 918 RETURN_IF_SCHEDULED_EXCEPTION(); 919 if (!r.IsEmpty()) { 920 return *v8::Utils::OpenHandle(*r); 921 } 922 } 923 924 return Heap::no_interceptor_result_sentinel(); 925} 926 927 928static Object* ThrowReferenceError(String* name) { 929 // If the load is non-contextual, just return the undefined result. 930 // Note that both keyed and non-keyed loads may end up here, so we 931 // can't use either LoadIC or KeyedLoadIC constructors. 932 IC ic(IC::NO_EXTRA_FRAME); 933 ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub()); 934 if (!ic.SlowIsContextual()) return Heap::undefined_value(); 935 936 // Throw a reference error. 937 HandleScope scope; 938 Handle<String> name_handle(name); 939 Handle<Object> error = 940 Factory::NewReferenceError("not_defined", 941 HandleVector(&name_handle, 1)); 942 return Top::Throw(*error); 943} 944 945 946static Object* LoadWithInterceptor(Arguments* args, 947 PropertyAttributes* attrs) { 948 Handle<String> name_handle = args->at<String>(0); 949 Handle<InterceptorInfo> interceptor_info = args->at<InterceptorInfo>(1); 950 ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2); 951 Handle<JSObject> receiver_handle = args->at<JSObject>(2); 952 Handle<JSObject> holder_handle = args->at<JSObject>(3); 953 ASSERT(args->length() == 5); // Last arg is data object. 
954 955 Address getter_address = v8::ToCData<Address>(interceptor_info->getter()); 956 v8::NamedPropertyGetter getter = 957 FUNCTION_CAST<v8::NamedPropertyGetter>(getter_address); 958 ASSERT(getter != NULL); 959 960 { 961 // Use the interceptor getter. 962 v8::AccessorInfo info(args->arguments() - 963 kAccessorInfoOffsetInInterceptorArgs); 964 HandleScope scope; 965 v8::Handle<v8::Value> r; 966 { 967 // Leaving JavaScript. 968 VMState state(EXTERNAL); 969 r = getter(v8::Utils::ToLocal(name_handle), info); 970 } 971 RETURN_IF_SCHEDULED_EXCEPTION(); 972 if (!r.IsEmpty()) { 973 *attrs = NONE; 974 return *v8::Utils::OpenHandle(*r); 975 } 976 } 977 978 Object* result = holder_handle->GetPropertyPostInterceptor( 979 *receiver_handle, 980 *name_handle, 981 attrs); 982 RETURN_IF_SCHEDULED_EXCEPTION(); 983 return result; 984} 985 986 987/** 988 * Loads a property with an interceptor performing post interceptor 989 * lookup if interceptor failed. 990 */ 991Object* LoadPropertyWithInterceptorForLoad(Arguments args) { 992 PropertyAttributes attr = NONE; 993 Object* result = LoadWithInterceptor(&args, &attr); 994 if (result->IsFailure()) return result; 995 996 // If the property is present, return it. 997 if (attr != ABSENT) return result; 998 return ThrowReferenceError(String::cast(args[0])); 999} 1000 1001 1002Object* LoadPropertyWithInterceptorForCall(Arguments args) { 1003 PropertyAttributes attr; 1004 Object* result = LoadWithInterceptor(&args, &attr); 1005 RETURN_IF_SCHEDULED_EXCEPTION(); 1006 // This is call IC. In this case, we simply return the undefined result which 1007 // will lead to an exception when trying to invoke the result as a 1008 // function. 
1009 return result; 1010} 1011 1012 1013Object* StoreInterceptorProperty(Arguments args) { 1014 JSObject* recv = JSObject::cast(args[0]); 1015 String* name = String::cast(args[1]); 1016 Object* value = args[2]; 1017 ASSERT(recv->HasNamedInterceptor()); 1018 PropertyAttributes attr = NONE; 1019 Object* result = recv->SetPropertyWithInterceptor(name, value, attr); 1020 return result; 1021} 1022 1023 1024Object* KeyedLoadPropertyWithInterceptor(Arguments args) { 1025 JSObject* receiver = JSObject::cast(args[0]); 1026 uint32_t index = Smi::cast(args[1])->value(); 1027 return receiver->GetElementWithInterceptor(receiver, index); 1028} 1029 1030 1031Object* StubCompiler::CompileCallInitialize(Code::Flags flags) { 1032 HandleScope scope; 1033 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1034 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1035 if (kind == Code::CALL_IC) { 1036 CallIC::GenerateInitialize(masm(), argc); 1037 } else { 1038 KeyedCallIC::GenerateInitialize(masm(), argc); 1039 } 1040 Object* result = GetCodeWithFlags(flags, "CompileCallInitialize"); 1041 if (!result->IsFailure()) { 1042 Counters::call_initialize_stubs.Increment(); 1043 Code* code = Code::cast(result); 1044 USE(code); 1045 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_INITIALIZE_TAG), 1046 code, code->arguments_count())); 1047 } 1048 return result; 1049} 1050 1051 1052Object* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) { 1053 HandleScope scope; 1054 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1055 // The code of the PreMonomorphic stub is the same as the code 1056 // of the Initialized stub. They just differ on the code object flags. 
1057 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1058 if (kind == Code::CALL_IC) { 1059 CallIC::GenerateInitialize(masm(), argc); 1060 } else { 1061 KeyedCallIC::GenerateInitialize(masm(), argc); 1062 } 1063 Object* result = GetCodeWithFlags(flags, "CompileCallPreMonomorphic"); 1064 if (!result->IsFailure()) { 1065 Counters::call_premonomorphic_stubs.Increment(); 1066 Code* code = Code::cast(result); 1067 USE(code); 1068 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_PRE_MONOMORPHIC_TAG), 1069 code, code->arguments_count())); 1070 } 1071 return result; 1072} 1073 1074 1075Object* StubCompiler::CompileCallNormal(Code::Flags flags) { 1076 HandleScope scope; 1077 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1078 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1079 if (kind == Code::CALL_IC) { 1080 CallIC::GenerateNormal(masm(), argc); 1081 } else { 1082 KeyedCallIC::GenerateNormal(masm(), argc); 1083 } 1084 Object* result = GetCodeWithFlags(flags, "CompileCallNormal"); 1085 if (!result->IsFailure()) { 1086 Counters::call_normal_stubs.Increment(); 1087 Code* code = Code::cast(result); 1088 USE(code); 1089 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_NORMAL_TAG), 1090 code, code->arguments_count())); 1091 } 1092 return result; 1093} 1094 1095 1096Object* StubCompiler::CompileCallMegamorphic(Code::Flags flags) { 1097 HandleScope scope; 1098 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1099 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1100 if (kind == Code::CALL_IC) { 1101 CallIC::GenerateMegamorphic(masm(), argc); 1102 } else { 1103 KeyedCallIC::GenerateMegamorphic(masm(), argc); 1104 } 1105 1106 Object* result = GetCodeWithFlags(flags, "CompileCallMegamorphic"); 1107 if (!result->IsFailure()) { 1108 Counters::call_megamorphic_stubs.Increment(); 1109 Code* code = Code::cast(result); 1110 USE(code); 1111 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG), 1112 code, code->arguments_count())); 1113 } 
1114 return result; 1115} 1116 1117 1118Object* StubCompiler::CompileCallMiss(Code::Flags flags) { 1119 HandleScope scope; 1120 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1121 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1122 if (kind == Code::CALL_IC) { 1123 CallIC::GenerateMiss(masm(), argc); 1124 } else { 1125 KeyedCallIC::GenerateMiss(masm(), argc); 1126 } 1127 Object* result = GetCodeWithFlags(flags, "CompileCallMiss"); 1128 if (!result->IsFailure()) { 1129 Counters::call_megamorphic_stubs.Increment(); 1130 Code* code = Code::cast(result); 1131 USE(code); 1132 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MISS_TAG), 1133 code, code->arguments_count())); 1134 } 1135 return result; 1136} 1137 1138 1139#ifdef ENABLE_DEBUGGER_SUPPORT 1140Object* StubCompiler::CompileCallDebugBreak(Code::Flags flags) { 1141 HandleScope scope; 1142 Debug::GenerateCallICDebugBreak(masm()); 1143 Object* result = GetCodeWithFlags(flags, "CompileCallDebugBreak"); 1144 if (!result->IsFailure()) { 1145 Code* code = Code::cast(result); 1146 USE(code); 1147 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1148 USE(kind); 1149 PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_DEBUG_BREAK_TAG), 1150 code, code->arguments_count())); 1151 } 1152 return result; 1153} 1154 1155 1156Object* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) { 1157 HandleScope scope; 1158 // Use the same code for the the step in preparations as we do for 1159 // the miss case. 
1160 int argc = Code::ExtractArgumentsCountFromFlags(flags); 1161 Code::Kind kind = Code::ExtractKindFromFlags(flags); 1162 if (kind == Code::CALL_IC) { 1163 CallIC::GenerateMiss(masm(), argc); 1164 } else { 1165 KeyedCallIC::GenerateMiss(masm(), argc); 1166 } 1167 Object* result = GetCodeWithFlags(flags, "CompileCallDebugPrepareStepIn"); 1168 if (!result->IsFailure()) { 1169 Code* code = Code::cast(result); 1170 USE(code); 1171 PROFILE(CodeCreateEvent( 1172 CALL_LOGGER_TAG(kind, CALL_DEBUG_PREPARE_STEP_IN_TAG), 1173 code, 1174 code->arguments_count())); 1175 } 1176 return result; 1177} 1178#endif 1179 1180#undef CALL_LOGGER_TAG 1181 1182Object* StubCompiler::GetCodeWithFlags(Code::Flags flags, const char* name) { 1183 // Check for allocation failures during stub compilation. 1184 if (failure_->IsFailure()) return failure_; 1185 1186 // Create code object in the heap. 1187 CodeDesc desc; 1188 masm_.GetCode(&desc); 1189 Object* result = Heap::CreateCode(desc, flags, masm_.CodeObject()); 1190#ifdef ENABLE_DISASSEMBLER 1191 if (FLAG_print_code_stubs && !result->IsFailure()) { 1192 Code::cast(result)->Disassemble(name); 1193 } 1194#endif 1195 return result; 1196} 1197 1198 1199Object* StubCompiler::GetCodeWithFlags(Code::Flags flags, String* name) { 1200 if (FLAG_print_code_stubs && (name != NULL)) { 1201 return GetCodeWithFlags(flags, *name->ToCString()); 1202 } 1203 return GetCodeWithFlags(flags, reinterpret_cast<char*>(NULL)); 1204} 1205 1206 1207void StubCompiler::LookupPostInterceptor(JSObject* holder, 1208 String* name, 1209 LookupResult* lookup) { 1210 holder->LocalLookupRealNamedProperty(name, lookup); 1211 if (!lookup->IsProperty()) { 1212 lookup->NotFound(); 1213 Object* proto = holder->GetPrototype(); 1214 if (proto != Heap::null_value()) { 1215 proto->Lookup(name, lookup); 1216 } 1217 } 1218} 1219 1220 1221 1222Object* LoadStubCompiler::GetCode(PropertyType type, String* name) { 1223 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, type); 
1224 return GetCodeWithFlags(flags, name); 1225} 1226 1227 1228Object* KeyedLoadStubCompiler::GetCode(PropertyType type, String* name) { 1229 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, type); 1230 return GetCodeWithFlags(flags, name); 1231} 1232 1233 1234Object* StoreStubCompiler::GetCode(PropertyType type, String* name) { 1235 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, type); 1236 return GetCodeWithFlags(flags, name); 1237} 1238 1239 1240Object* KeyedStoreStubCompiler::GetCode(PropertyType type, String* name) { 1241 Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_STORE_IC, type); 1242 return GetCodeWithFlags(flags, name); 1243} 1244 1245 1246CallStubCompiler::CallStubCompiler(int argc, 1247 InLoopFlag in_loop, 1248 Code::Kind kind, 1249 InlineCacheHolderFlag cache_holder) 1250 : arguments_(argc) 1251 , in_loop_(in_loop) 1252 , kind_(kind) 1253 , cache_holder_(cache_holder) { 1254} 1255 1256 1257Object* CallStubCompiler::CompileCustomCall(int generator_id, 1258 Object* object, 1259 JSObject* holder, 1260 JSFunction* function, 1261 String* fname, 1262 CheckType check) { 1263 ASSERT(generator_id >= 0 && generator_id < kNumCallGenerators); 1264 switch (generator_id) { 1265#define CALL_GENERATOR_CASE(ignored1, ignored2, name) \ 1266 case k##name##CallGenerator: \ 1267 return CallStubCompiler::Compile##name##Call(object, \ 1268 holder, \ 1269 function, \ 1270 fname, \ 1271 check); 1272 CUSTOM_CALL_IC_GENERATORS(CALL_GENERATOR_CASE) 1273#undef CALL_GENERATOR_CASE 1274 } 1275 UNREACHABLE(); 1276 return Heap::undefined_value(); 1277} 1278 1279 1280Object* CallStubCompiler::GetCode(PropertyType type, String* name) { 1281 int argc = arguments_.immediate(); 1282 Code::Flags flags = Code::ComputeMonomorphicFlags(kind_, 1283 type, 1284 cache_holder_, 1285 in_loop_, 1286 argc); 1287 return GetCodeWithFlags(flags, name); 1288} 1289 1290 1291Object* CallStubCompiler::GetCode(JSFunction* function) { 1292 String* 
function_name = NULL; 1293 if (function->shared()->name()->IsString()) { 1294 function_name = String::cast(function->shared()->name()); 1295 } 1296 return GetCode(CONSTANT_FUNCTION, function_name); 1297} 1298 1299 1300Object* ConstructStubCompiler::GetCode() { 1301 Code::Flags flags = Code::ComputeFlags(Code::STUB); 1302 Object* result = GetCodeWithFlags(flags, "ConstructStub"); 1303 if (!result->IsFailure()) { 1304 Code* code = Code::cast(result); 1305 USE(code); 1306 PROFILE(CodeCreateEvent(Logger::STUB_TAG, code, "ConstructStub")); 1307 } 1308 return result; 1309} 1310 1311 1312CallOptimization::CallOptimization(LookupResult* lookup) { 1313 if (!lookup->IsProperty() || !lookup->IsCacheable() || 1314 lookup->type() != CONSTANT_FUNCTION) { 1315 Initialize(NULL); 1316 } else { 1317 // We only optimize constant function calls. 1318 Initialize(lookup->GetConstantFunction()); 1319 } 1320} 1321 1322CallOptimization::CallOptimization(JSFunction* function) { 1323 Initialize(function); 1324} 1325 1326 1327int CallOptimization::GetPrototypeDepthOfExpectedType(JSObject* object, 1328 JSObject* holder) const { 1329 ASSERT(is_simple_api_call_); 1330 if (expected_receiver_type_ == NULL) return 0; 1331 int depth = 0; 1332 while (object != holder) { 1333 if (object->IsInstanceOf(expected_receiver_type_)) return depth; 1334 object = JSObject::cast(object->GetPrototype()); 1335 ++depth; 1336 } 1337 if (holder->IsInstanceOf(expected_receiver_type_)) return depth; 1338 return kInvalidProtoDepth; 1339} 1340 1341 1342void CallOptimization::Initialize(JSFunction* function) { 1343 constant_function_ = NULL; 1344 is_simple_api_call_ = false; 1345 expected_receiver_type_ = NULL; 1346 api_call_info_ = NULL; 1347 1348 if (function == NULL || !function->is_compiled()) return; 1349 1350 constant_function_ = function; 1351 AnalyzePossibleApiFunction(function); 1352} 1353 1354 1355void CallOptimization::AnalyzePossibleApiFunction(JSFunction* function) { 1356 SharedFunctionInfo* sfi = 
function->shared(); 1357 if (!sfi->IsApiFunction()) return; 1358 FunctionTemplateInfo* info = sfi->get_api_func_data(); 1359 1360 // Require a C++ callback. 1361 if (info->call_code()->IsUndefined()) return; 1362 api_call_info_ = CallHandlerInfo::cast(info->call_code()); 1363 1364 // Accept signatures that either have no restrictions at all or 1365 // only have restrictions on the receiver. 1366 if (!info->signature()->IsUndefined()) { 1367 SignatureInfo* signature = SignatureInfo::cast(info->signature()); 1368 if (!signature->args()->IsUndefined()) return; 1369 if (!signature->receiver()->IsUndefined()) { 1370 expected_receiver_type_ = 1371 FunctionTemplateInfo::cast(signature->receiver()); 1372 } 1373 } 1374 1375 is_simple_api_call_ = true; 1376} 1377 1378 1379} } // namespace v8::internal 1380