// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector);
  __ Push(slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(slot);
  __ Pop(vector);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Drop(2);
}


void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(!AreAliased(receiver, scratch0, scratch1));
  DCHECK(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);

  // Check that receiver is a JSObject.
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Cmp(scratch0, FIRST_JS_RECEIVER_TYPE);
  __ B(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ Ldr(result,
         FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
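  // (In effect: result = native_context[index]->initial_map()->prototype().)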
  __ Ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype can't put the result directly in x0 because the
  // three input registers can't alias and we call this function from
  // LoadIC::GenerateFunctionPrototype, where the receiver is x0. So we
  // explicitly move the result into x0.
  __ Mov(x0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);

  __ Push(name, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate a call to an API accessor function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!AreAliased(accessor_holder, scratch));
  DCHECK(!AreAliased(receiver, scratch));

  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch.is(store_parameter));
    queue.Queue(store_parameter);
  }
  queue.PushQueued();

  DCHECK(optimization.is_simple_api_call());

  // ABI for CallApiCallbackStub.
  Register callee = x0;
  Register data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  // Put the callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put the holder in place.
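  // The holder is resolved at compile time: the generated code either reuses
  // the receiver or walks a fixed number of prototype links (holder_depth),
  // so no runtime lookup is performed here.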
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ Ldr(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ Ldr(data,
             FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ Ldr(data,
             FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ Ldr(data,
             FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ Ldr(data,
             FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ Ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(
      &fun, ExternalReference::DIRECT_API_CALL, masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ Push(cp);
    // Save value register, so we can restore it later.
    __ Push(value());

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      DCHECK(!AreAliased(value(), scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
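        // (JavaScript accessors must observe the global proxy, not the
        // JSGlobalObject itself, as their receiver.)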
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_SETTER);
      __ Mov(x0, 1);
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(x0);

    // Restore context register.
    __ Pop(cp);
  }
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ Push(cp);

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver);
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_GETTER);
      __ Mov(x0, 0);
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ Pop(cp);
  }
  __ Ret();
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  ASM_LOCATION("ElementHandlerCompiler::GenerateStoreSlow");
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
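  // The cell is referenced through a WeakCell; if the cell has been cleared
  // we jump to the miss label instead of keeping it alive from generated code.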
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, x1, x3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ Bind(label);
    __ Mov(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ Mov(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ Ldrsw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ TestAndBranchIfAnySet(scratch, Map::Deprecated::kMask, miss);
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ Ldr(scratch,
         FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Cmp(value_reg, scratch);
  __ B(ne, miss_label);
}

void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ Ldr(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CmpWeakValue(map_reg, Map::WeakCellForMap(field_type->AsClass()),
                    scratch);
    __ B(ne, miss_label);
  }
}


Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // object_reg and holder_reg registers can alias.
  DCHECK(!AreAliased(object_reg, scratch1, scratch2));
  DCHECK(!AreAliased(holder_reg, scratch1, scratch2));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ Mov(scratch1, Operand(validity_cell));
    __ Ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    __ Cmp(scratch1, Operand(Smi::FromInt(Map::kPrototypeChainValid)));
    __ B(ne, miss);
  }

  // The prototype chain of primitives (and their JSValue wrappers) depends
  // on the native context, which can't be guarded by validity cells.
  // |object_reg| holds the native context specific prototype in this case;
  // we need to check its map.
  if (check == CHECK_ALL_MAPS) {
    __ Ldr(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ B(ne, miss);
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object. This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    UseScratchRegisterScope temps(masm());
    __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() || (current->property_dictionary()->FindEntry(
                                       name) == NameDictionary::kNotFound));

      if (depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
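  // (depth stays 0 when the receiver map is already the holder map and the
  // loop above does not run.)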
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  bool return_holder = return_what == RETURN_HOLDER;
  if (return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    __ Bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(x0, value);
  __ Ret();
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(!AreAliased(receiver(), this->name(), scratch1(), scratch2(),
                     scratch3()));
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and it is needed should the interceptor return without any
  // result. The ACCESSOR case needs the receiver to be passed into C++ code;
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke the interceptor. Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see the caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if the interceptor provided a value for the property. If so,
    // return immediately.
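    // An interceptor that found nothing returns the no-interceptor-result
    // sentinel; any other value in x0 is the property value and can be
    // returned as-is.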
    Label interceptor_failed;
    __ JumpIfRoot(x0, Heap::kNoInterceptorResultSentinelRootIndex,
                  &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ Bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(this->name(), holder_reg, receiver());
    } else {
      __ Pop(this->name(), holder_reg);
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreCallback");
  Register holder_reg = Frontend(name);

  // Stub never generated for non-global objects that require access checks.
  DCHECK(holder()->IsJSGlobalProxy() || !holder()->IsAccessCheckNeeded());

  // receiver() and holder_reg can alias.
  DCHECK(!AreAliased(receiver(), scratch1(), scratch2(), value()));
  DCHECK(!AreAliased(holder_reg, scratch1(), scratch2(), value()));
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
    __ Mov(scratch1(), Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ Mov(scratch1(), Operand(cell));
  }
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64