// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
                LoadWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
                StoreWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
                StoreTransitionDescriptor::kVector);
  __ Push(slot);
  __ Push(vector);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector);
  __ Pop(slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Drop(2);
}

void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
  // No-op. Return address is in lr register.
}

void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
  // No-op. Return address is in lr register.
}

void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(!AreAliased(receiver, scratch0, scratch1));
  DCHECK(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);

  // Check that receiver is a JSObject.
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Cmp(scratch0, FIRST_JS_RECEIVER_TYPE);
  __ B(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
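  // (Since the function has an initial map, its "prototype" property lives on
  // that map rather than on the function itself, so load the map first and
  // then read its prototype field.)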
  __ Ldr(result,
         FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ Ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype can't put the result directly in x0 because the
  // 3 input registers can't alias and we call this function from
  // LoadIC::GenerateFunctionPrototype, where the receiver is x0. So we
  // explicitly move the result into x0.
  __ Mov(x0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsureEmptyPropertyCell(
      global, name, PropertyCellType::kInvalidated);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);

  __ Push(name, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate a call to an API accessor function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!AreAliased(accessor_holder, scratch));
  DCHECK(!AreAliased(receiver, scratch));

  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch.is(store_parameter));
    queue.Queue(store_parameter);
  }
  queue.PushQueued();

  DCHECK(optimization.is_simple_api_call());

  // ABI for CallApiCallbackStub.
  Register callee = x0;
  Register data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ Ldr(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ Ldr(data,
             FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ Ldr(data,
             FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ Ldr(data,
             FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ Ldr(data,
             FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ Ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(
      &fun, ExternalReference::DIRECT_API_CALL, masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ Push(cp);
    // Save value register, so we can restore it later.
    __ Push(value());

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      DCHECK(!AreAliased(value(), scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
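        // (JavaScript accessors are never invoked with the JSGlobalObject
        // itself as the receiver; they get the global proxy instead, so fetch
        // it from the global object here.)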
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_SETTER);
      __ Mov(x0, 1);
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(x0);

    // Restore context register.
    __ Pop(cp);
  }
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ Push(cp);

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver);
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_GETTER);
      __ Mov(x0, 0);
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ Pop(cp);
  }
  __ Ret();
}

#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, x1, x3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ Bind(label);
    __ Mov(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ Mov(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ Ldrsw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ TestAndBranchIfAnySet(scratch, Map::Deprecated::kMask, miss);
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ Ldr(scratch,
         FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Cmp(value_reg, scratch);
  __ B(ne, miss_label);
}

void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ Ldr(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CmpWeakValue(map_reg, Map::WeakCellForMap(field_type->AsClass()),
                    scratch);
    __ B(ne, miss_label);
  }
}

void PropertyHandlerCompiler::GenerateAccessCheck(
    Handle<WeakCell> native_context_cell, Register scratch1, Register scratch2,
    Label* miss, bool compare_native_contexts_only) {
  Label done;
  // Load current native context.
  __ Ldr(scratch1, NativeContextMemOperand());
  // Load expected native context.
  __ LoadWeakValue(scratch2, native_context_cell, miss);
  __ Cmp(scratch1, scratch2);

  if (!compare_native_contexts_only) {
    __ B(eq, &done);

    // Compare security tokens of current and expected native contexts.
    __ Ldr(scratch1,
           ContextMemOperand(scratch1, Context::SECURITY_TOKEN_INDEX));
    __ Ldr(scratch2,
           ContextMemOperand(scratch2, Context::SECURITY_TOKEN_INDEX));
    __ Cmp(scratch1, scratch2);
  }
  __ B(ne, miss);

  __ bind(&done);
}

Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // object_reg and holder_reg registers can alias.
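  // They must, however, both be distinct from the two scratch registers,
  // which the checks emitted below clobber.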
  DCHECK(!AreAliased(object_reg, scratch1, scratch2));
  DCHECK(!AreAliased(holder_reg, scratch1, scratch2));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ Mov(scratch1, Operand(validity_cell));
    __ Ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    // Compare scratch1 against Map::kPrototypeChainValid.
    static_assert(Map::kPrototypeChainValid == 0,
                  "Map::kPrototypeChainValid has unexpected value");
    __ Cbnz(scratch1, miss);
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  Handle<Map> current_map(receiver_map->GetPrototypeChainRootMap(isolate()),
                          isolate());
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      DCHECK(name->IsUniqueName());
      DCHECK(current.is_null() || (current->property_dictionary()->FindEntry(
                                       name) == NameDictionary::kNotFound));

      if (depth > 1) {
        Handle<WeakCell> weak_cell =
            Map::GetOrCreatePrototypeWeakCell(current, isolate());
        __ LoadWeakValue(reg, weak_cell, miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = handle(JSObject::cast(current_map->prototype()));
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  bool return_holder = return_what == RETURN_HOLDER;
  if (return_holder && depth != 0) {
    Handle<WeakCell> weak_cell =
        Map::GetOrCreatePrototypeWeakCell(current, isolate());
    __ LoadWeakValue(reg, weak_cell, miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    __ Bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
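  // On arm64 the handler's result is returned in x0, so materialize the
  // constant there and return.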
  __ LoadObject(x0, value);
  __ Ret();
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(!AreAliased(receiver(), this->name(), scratch1(), scratch2(),
                     scratch3()));
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and is still needed in case the interceptor returns without a
  // result. The ACCESSOR case needs the receiver to be passed into C++ code;
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke the interceptor. Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if the interceptor provided a value for the property. If so,
    // return immediately.
    Label interceptor_failed;
    __ JumpIfRoot(x0, Heap::kNoInterceptorResultSentinelRootIndex,
                  &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ Bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(this->name(), holder_reg, receiver());
    } else {
      __ Pop(this->name(), holder_reg);
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreCallback");
  Register holder_reg = Frontend(name);

  // Stub never generated for non-global objects that require access checks.
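  // (The DCHECK below enforces this: an access-checked holder other than the
  // global proxy would need checks this stub does not emit.)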
  DCHECK(holder()->IsJSGlobalProxy() || !holder()->IsAccessCheckNeeded());

  // receiver() and holder_reg can alias.
  DCHECK(!AreAliased(receiver(), scratch1(), scratch2(), value()));
  DCHECK(!AreAliased(holder_reg, scratch1(), scratch2(), value()));
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
    __ Mov(scratch1(), Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ Mov(scratch1(), Operand(cell));
  }
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64