handler-compiler-s390.cc revision bcf72ee8e3b26f1d0726869c7ddb3921c68b09a8
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// Emits the code that invokes a JavaScript getter for a load IC handler.
// When accessor_index >= 0 the getter is loaded from |holder| and called via
// the CallFunction builtin; otherwise only the deopt continuation PC is
// recorded. NOTE(review): |expected_arguments| is not used by this port.
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- r2    : receiver
  //  -- r4    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver);
      // r3 holds the getter function; r2 is the argument count (zero).
      __ LoadAccessor(r3, holder, accessor_index, ACCESSOR_GETTER);
      __ LoadImmP(r2, Operand::Zero());
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}

// Emits the code that invokes a JavaScript setter for a store IC handler.
// Mirrors GenerateLoadViaGetter, but also preserves the stored value so it
// can be returned to the caller (a store expression evaluates to the value).
// NOTE(review): |expected_arguments| is not used by this port.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    // Save value register, so we can restore it later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ LoadP(scratch,
                 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      // r3 holds the setter function; r2 is the argument count (one: value).
      __ LoadAccessor(r3, holder, accessor_index, ACCESSOR_SETTER);
      __ LoadImmP(r2, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, r2);
  }
  __ Ret();
}

// Saves the type feedback vector and slot on the stack.
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}

// Restores the type feedback vector and slot from the stack.
void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}

// Drops the saved vector and slot without restoring them into registers.
void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ la(sp, MemOperand(sp, 2 * kPointerSize));
}

// Proves that |name| is not present on a normal (dictionary-mode) |receiver|.
// Jumps to |miss_label| if the receiver has an interceptor, needs access
// checks, is not a JSReceiver, or if the negative dictionary lookup fails.
// Clobbers |scratch0| and |scratch1|.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ LoadlB(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ AndP(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ bne(miss_label);

  // Check that receiver is a JSObject.
  // TODO(joransiu): Merge into SI compare
  __ LoadlB(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ CmpP(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ blt(miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ CompareRoot(map, Heap::kHashTableMapRootIndex);
  __ bne(miss_label);

  // Restore the temporarily used register.
  __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

// Loads the prototype of the native-context global function at |index| into
// |result|, going through the function's initial map.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ LoadP(result,
           FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ LoadP(result, FieldMemOperand(result, Map::kPrototypeOffset));
}

// Returns the "prototype" property of a function |receiver| in r2, or jumps
// to |miss_label| if it cannot be fetched directly.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ LoadRR(r2, scratch1);
  __ Ret();
}

// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  // Miss if the weak cell has been cleared or the cell now holds a value.
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  __ bne(miss);
}

// Pushes the interceptor call arguments in the stack layout asserted below
// (name at kInterceptorArgsNameIndex, then receiver, then holder).
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name);
  __ Push(receiver);
  __ Push(holder);
}

// Pushes the interceptor arguments and calls the runtime function |id|.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}

// Generate call to api function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ Push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ Push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = r2;
  Register data = r6;
  Register holder = r4;
  Register api_function_address = r3;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      // Walk |holder_depth| steps up the prototype chain via the maps.
      __ LoadP(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ LoadP(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ LoadP(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ LoadP(data,
               FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ LoadP(data,
               FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ LoadP(data,
               FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ LoadP(data,
               FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ LoadP(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}

// Pushes the five store IC arguments expected by the slow-path runtime calls.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}

void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}

void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}

#undef __
#define __ ACCESS_MASM(masm())

// Binds |label| (if used) and reloads |name| into the name register; used as
// the landing point for misses that clobbered the name register.
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}

void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ mov(this->name(), Operand(name));
}

void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}

// Loads the |transition| map into |map_reg| via a weak cell and, when the map
// can be deprecated, misses if its Deprecated bit is set.
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ LoadlW(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ DecodeField<Map::Deprecated>(r0, scratch);
    __ bne(miss);
  }
}

// Misses unless |value_reg| equals the constant stored in the descriptor
// array of |map_reg| at |descriptor|.
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ CmpP(value_reg, FieldMemOperand(
                         scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ bne(miss_label);
}

// Misses if |value_reg| is a Smi or, for class field types, if its map does
// not match the field type's class map.
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ LoadP(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CmpWeakValue(map_reg, Map::WeakCellForMap(field_type->AsClass()),
                    scratch);
    __ bne(miss_label);
  }
}

Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ mov(scratch1, Operand(validity_cell));
      __ LoadP(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ CmpSmiLiteral(scratch1, Smi::FromInt(Map::kPrototypeChainValid), r0);
      __ bne(miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ LoadP(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ b(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }
  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ LoadP(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ bne(miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ LoadP(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ bne(miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}

// Emits the shared miss tail: restores saved vector/slot when the IC uses a
// vector and tail-calls the miss builtin.
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    // GenerateRestoreName binds |miss| before reloading the name register.
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r2, value);
  __ Ret();
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(r2, Heap::kNoInterceptorResultSentinelRootIndex);
    __ beq(&interceptor_failed, Label::kNear);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    __ Pop(this->name());
    __ Pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // receiver

  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ mov(ip, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ mov(ip, Operand(cell));
  }
  __ Push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}

Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}

Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    __ beq(&miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, r3, r5);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}

#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390