// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/ic/call-optimization.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
17
// Proves that |name| is absent from |receiver| by probing the receiver's
// property dictionary.  Jumps to |miss_label| whenever absence cannot be
// proven cheaply (interceptors, access checks, non-dictionary properties, or
// a dictionary hit).  Clobbers |scratch0| and |scratch1|.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  // Pessimistically count a miss; decremented again on the success path below.
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  // Emits the actual probe; control reaches |done| only when |name| is
  // definitely not present in the dictionary.
  NameDictionaryLookupStub::GenerateNegativeLookup(masm, miss_label, &done,
                                                   properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
56
57
// Loads the prototype of the native-context global function at slot |index|
// into |prototype|.  Jumps to |miss| if the current context no longer refers
// to the same function this handler was compiled for.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;  // Reuse |prototype| until the final load.
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  // rsi is treated as the current context register here.
  __ movp(scratch, Operand(rsi, offset));
  __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
78
79
// Loads the prototype of the function in |receiver| into |result| and
// returns it in rax.  Jumps to |miss_label| when TryGetFunctionPrototype
// fails.  |scratch| is unused on x64.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register result, Register scratch,
    Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  // The load handler convention returns the result in rax.
  if (!result.is(rax)) __ movp(rax, result);
  __ ret(0);
}
87
88
// Pushes the four interceptor-call arguments (name, interceptor info,
// receiver, holder) in the exact stack layout the IC runtime expects.
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  // The push order below must stay in sync with the kInterceptorArgs*
  // indices; these asserts pin that correspondence.
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // Embedding the handle directly in code requires it to be tenured.
  DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ Push(kScratchRegister);
  __ Push(receiver);
  __ Push(holder);
}
105
106
// Pushes the interceptor arguments and calls IC utility |id| to invoke the
// named-property load interceptor on |holder_obj|.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
                           NamedLoadHandlerCompiler::kInterceptorArgsLength);
}
114
115
// Generate call to api function.
// Pushes the receiver and |argc| argument registers below the return
// address, loads the fixed CallApiFunctionStub register ABI (callee,
// call data, holder, function address), and tail-calls the stub.
// |scratch_in| is clobbered.
void PropertyHandlerCompiler::GenerateFastApiCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, int argc, Register* values) {
  DCHECK(optimization.is_simple_api_call());

  __ PopReturnAddressTo(scratch_in);
  // receiver
  __ Push(receiver);
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    // Arguments are pushed in reverse so values[0] ends up deepest.
    Register arg = values[argc - 1 - i];
    DCHECK(!receiver.is(arg));
    DCHECK(!scratch_in.is(arg));
    __ Push(arg);
  }
  __ PushReturnAddressFrom(scratch_in);
  // Stack now matches JSFunction abi.

  // Abi for CallApiFunctionStub.
  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register scratch = rdi;  // scratch_in is no longer valid.

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Move(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      // Fast API calls are only generated once a holder has been found.
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ Move(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    // New-space objects may move, so load the data indirectly through the
    // call handler info instead of embedding the handle.
    __ Move(scratch, api_call_info);
    __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ Move(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ Move(api_function_address, function_address,
          RelocInfo::EXTERNAL_REFERENCE);

  // Jump to stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}
188
189
// Checks that the global property cell for |name| still contains the hole,
// i.e. that the property is still absent from |global|.  Jumps to |miss|
// otherwise.  Clobbers |scratch|.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  // At compile time the property must not exist yet.
  DCHECK(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}
200
201
// Invokes the JavaScript |setter| with the receiver and the value register's
// contents.  When |setter| is null, only records a deoptimization
// continuation point.  Always returns the original stored value (in rax),
// never the setter's return value.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ Push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
      }
      __ Push(receiver);
      __ Push(value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
                        NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(rax);

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
241
242
// Invokes the JavaScript |getter| with the receiver on the stack and returns
// its result.  When |getter| is null, only records a deoptimization
// continuation point.
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
                        NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
277
278
279static void StoreIC_PushArgs(MacroAssembler* masm) {
280  Register receiver = StoreDescriptor::ReceiverRegister();
281  Register name = StoreDescriptor::NameRegister();
282  Register value = StoreDescriptor::ValueRegister();
283
284  DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));
285
286  __ PopReturnAddressTo(rbx);
287  __ Push(receiver);
288  __ Push(name);
289  __ Push(value);
290  __ PushReturnAddressFrom(rbx);
291}
292
293
// Tail-calls the generic StoreIC slow path with (receiver, name, value)
// pushed as runtime-call arguments.
void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.  3 arguments, 1 result.
  ExternalReference ref(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
302
303
// Tail-calls the KeyedStoreIC slow path with (receiver, name, value) pushed
// as runtime-call arguments.
void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.  3 arguments, 1 result.
  ExternalReference ref(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
312
313
#undef __
#define __ ACCESS_MASM((masm()))
316
317
318void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
319                                                    Handle<Name> name) {
320  if (!label->is_unused()) {
321    __ bind(label);
322    __ Move(this->name(), name);
323  }
324}
325
326
// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
// Emits a map-transitioning store: validates the incoming value against the
// target field's representation/type, extends the properties backing store
// via the runtime when full, installs the new map, writes the field (with
// write barrier where required), and returns the stored value in rax.
void NamedStoreHandlerCompiler::GenerateStoreTransition(
    Handle<Map> transition, Handle<Name> name, Register receiver_reg,
    Register storage_reg, Register value_reg, Register scratch1,
    Register scratch2, Register unused, Label* miss_label, Label* slow) {
  // The transitioned-to property is the last one added to the new map.
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  DCHECK(!representation.IsNone());

  if (details.type() == CONSTANT) {
    // A constant transition only applies when the stored value is exactly
    // the expected constant.
    Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      // Compare against each allowed class; only the final comparison jumps
      // to the miss label, earlier matches short-circuit to do_store.
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Box the value into a freshly allocated mutable heap number held in
    // storage_reg; both Smi and HeapNumber inputs are accepted.
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow, MUTABLE);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
                DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }

  // Stub never generated for objects that require access checks.
  DCHECK(!transition->is_access_check_needed());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ Push(receiver_reg);
    __ Push(transition);
    __ Push(value_reg);
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          isolate()),
        3, 1);
    return;
  }

  // Update the map of the object.
  __ Move(scratch1, transition);
  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    // Nothing to write for a constant property; the map change is the store.
    DCHECK(value_reg.is(rax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= transition->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check =
      representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = transition->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      // Doubles are stored boxed: write the heap number allocated above.
      __ movp(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movp(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
                          kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ movp(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ movp(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
                          kDontSaveFPRegs, EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  DCHECK(value_reg.is(rax));
  __ ret(0);
}
467
468
// Emits the type check for a HeapObject-representation field store, then
// tail-calls the StoreFieldStub that performs the actual write.  Jumps to
// |miss_label| when |value_reg| is a Smi or its map is not one of the
// field type's allowed classes.
void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
                                                   Register value_reg,
                                                   Label* miss_label) {
  DCHECK(lookup->representation().IsHeapObject());
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
  Label do_store;
  // Compare against each allowed class; only the final comparison jumps to
  // the miss label, earlier matches short-circuit to do_store.
  while (true) {
    __ CompareMap(value_reg, it.Current());
    it.Advance();
    if (it.Done()) {
      __ j(not_equal, miss_label);
      break;
    }
    __ j(equal, &do_store, Label::kNear);
  }
  __ bind(&do_store);

  StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
                      lookup->representation());
  GenerateTailCall(masm(), stub.GetCode());
}
491
492
// Walks the prototype chain from the receiver's map down to holder(),
// validating each map (or emitting a negative dictionary lookup for
// dictionary-mode objects) along the way; jumps to |miss| on any mismatch.
// Returns the register that holds the holder when the walk is done
// (object_reg when the chain is empty, otherwise holder_reg).
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type()->IsConstant()) {
    current = Handle<JSObject>::cast(type()->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      // The compile-time check mirrors what the generated negative lookup
      // will verify at run time.
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);

      // Advance along the chain: load the prototype through the map.
      __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      // Two possible reasons for loading the prototype from the map:
      // (1) Can't store references to new space in code.
      // (2) Handler is shared for all receivers with the same prototype
      //     map (but not necessarily the same prototype instance).
      bool load_prototype_from_map = in_new_space || depth == 1;
      if (load_prototype_from_map) {
        // Save the map in scratch1 for later.
        __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      // This allows us to install generated handlers for accesses to the
      // global proxy (as opposed to using slow ICs). See corresponding code
      // in LookupForRead().
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (load_prototype_from_map) {
        __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        __ Move(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  DCHECK(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}
604
605
606void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
607  if (!miss->is_unused()) {
608    Label success;
609    __ jmp(&success);
610    __ bind(miss);
611    TailCallBuiltin(masm(), MissBuiltin(kind()));
612    __ bind(&success);
613  }
614}
615
616
617void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
618  if (!miss->is_unused()) {
619    Label success;
620    __ jmp(&success);
621    GenerateRestoreName(miss, name);
622    TailCallBuiltin(masm(), MissBuiltin(kind()));
623    __ bind(&success);
624  }
625}
626
627
// Builds the PropertyCallbackArguments frame above the return address and
// tail-calls CallApiGetterStub to invoke the accessor's native getter.
void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  DCHECK(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());

  // The push order below must stay in sync with these indices.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  __ Push(receiver());  // receiver
  if (heap()->InNewSpace(callback->data())) {
    // New-space data cannot be embedded in code; load it from the callback
    // object instead.
    DCHECK(!scratch2().is(reg));
    __ Move(scratch2(), callback);
    __ Push(FieldOperand(scratch2(),
                         ExecutableAccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  DCHECK(!kScratchRegister.is(reg));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(reg);     // holder
  __ Push(name());  // name
  // Save a pointer to where we pushed the arguments pointer.  This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.

  __ PushReturnAddressFrom(scratch4());

  // Abi for CallApiGetter
  Register api_function_address = ApiGetterDescriptor::function_address();
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}
670
671
// Emits a handler body that returns the statically-known constant |value|
// in rax.
void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(rax, value);
  __ ret(0);
}
677
678
// Calls the named-property load interceptor first; if it yields no result
// (the no-interceptor-result sentinel), falls through to inline code that
// loads the property from further up the prototype chain.
void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);

    if (must_preserve_receiver_reg) {
      __ Push(receiver());
    }
    __ Push(holder_reg);
    __ Push(this->name());

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        IC::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ ret(0);

    __ bind(&interceptor_failed);
    // Restore the registers saved above, in reverse push order.
    __ Pop(this->name());
    __ Pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver());
    }

    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}
737
738
739void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
740  // Call the runtime system to load the interceptor.
741  DCHECK(holder()->HasNamedInterceptor());
742  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
743  __ PopReturnAddressTo(scratch2());
744  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
745                           holder());
746  __ PushReturnAddressFrom(scratch2());
747
748  ExternalReference ref = ExternalReference(
749      IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
750  __ TailCallExternalReference(
751      ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
752}
753
754
// Compiles a store handler that forwards the store to a native accessor:
// runs the frontend map checks, pushes (receiver, holder, callback, name,
// value), and tail-calls the StoreCallbackProperty runtime function.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = Frontend(receiver(), name);

  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(holder_reg);
  __ Push(callback);  // callback info
  __ Push(name);
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.  5 arguments, 1 result.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
776
777
778Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
779    Handle<Name> name) {
780  __ PopReturnAddressTo(scratch1());
781  __ Push(receiver());
782  __ Push(this->name());
783  __ Push(value());
784  __ PushReturnAddressFrom(scratch1());
785
786  // Do tail-call to the runtime system.
787  ExternalReference store_ic_property = ExternalReference(
788      IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
789  __ TailCallExternalReference(store_ic_property, 3, 1);
790
791  // Return the generated code.
792  return GetCode(kind(), Code::FAST, name);
793}
794
795
// The register in which the value to be stored arrives, as defined by the
// platform store-IC calling convention.
Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}
799
800
// Compiles a load handler for a global property backed by a property cell:
// runs the frontend checks, loads the cell's value, and (for configurable
// properties) misses when the value is the hole, i.e. the property was
// deleted after the handler was compiled.
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  FrontendHeader(receiver(), name, &miss);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  __ Move(result, cell);
  __ movp(result, FieldOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    // Non-configurable cells must never hold the hole; assert in debug code.
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ ret(0);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
829
830
#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64
836