1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if V8_TARGET_ARCH_X64
31
32#include "arguments.h"
33#include "ic-inl.h"
34#include "codegen.h"
35#include "stub-cache.h"
36
37namespace v8 {
38namespace internal {
39
40#define __ ACCESS_MASM(masm)
41
42
43static void ProbeTable(Isolate* isolate,
44                       MacroAssembler* masm,
45                       Code::Flags flags,
46                       StubCache::Table table,
47                       Register receiver,
48                       Register name,
49                       // The offset is scaled by 4, based on
50                       // kHeapObjectTagSize, which is two bits
51                       Register offset) {
52  // We need to scale up the pointer by 2 because the offset is scaled by less
53  // than the pointer size.
54  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
55  ScaleFactor scale_factor = times_2;
56
57  ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
58  // The offset register holds the entry offset times four (due to masking
59  // and shifting optimizations).
60  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
61  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
62  Label miss;
63
64  // Multiply by 3 because there are 3 fields per entry (name, code, map).
65  __ lea(offset, Operand(offset, offset, times_2, 0));
66
67  __ LoadAddress(kScratchRegister, key_offset);
68
69  // Check that the key in the entry matches the name.
70  // Multiply entry offset by 16 to get the entry address. Since the
71  // offset register already holds the entry offset times four, multiply
72  // by a further four.
73  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
74  __ j(not_equal, &miss);
75
76  // Get the map entry from the cache.
77  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
78  __ movq(kScratchRegister,
79          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
80  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
81  __ j(not_equal, &miss);
82
83  // Get the code entry from the cache.
84  __ LoadAddress(kScratchRegister, value_offset);
85  __ movq(kScratchRegister,
86          Operand(kScratchRegister, offset, scale_factor, 0));
87
88  // Check that the flags match what we're looking for.
89  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
90  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
91  __ cmpl(offset, Immediate(flags));
92  __ j(not_equal, &miss);
93
94#ifdef DEBUG
95    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
96      __ jmp(&miss);
97    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
98      __ jmp(&miss);
99    }
100#endif
101
102  // Jump to the first instruction in the code stub.
103  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
104  __ jmp(kScratchRegister);
105
106  __ bind(&miss);
107}
108
109
110void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
111                                                    Label* miss_label,
112                                                    Register receiver,
113                                                    Handle<Name> name,
114                                                    Register scratch0,
115                                                    Register scratch1) {
116  ASSERT(name->IsUniqueName());
117  ASSERT(!receiver.is(scratch0));
118  Counters* counters = masm->isolate()->counters();
119  __ IncrementCounter(counters->negative_lookups(), 1);
120  __ IncrementCounter(counters->negative_lookups_miss(), 1);
121
122  __ movq(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
123
124  const int kInterceptorOrAccessCheckNeededMask =
125      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
126
127  // Bail out if the receiver has a named interceptor or requires access checks.
128  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
129           Immediate(kInterceptorOrAccessCheckNeededMask));
130  __ j(not_zero, miss_label);
131
132  // Check that receiver is a JSObject.
133  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
134  __ j(below, miss_label);
135
136  // Load properties array.
137  Register properties = scratch0;
138  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
139
140  // Check that the properties array is a dictionary.
141  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
142                 Heap::kHashTableMapRootIndex);
143  __ j(not_equal, miss_label);
144
145  Label done;
146  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
147                                                   miss_label,
148                                                   &done,
149                                                   properties,
150                                                   name,
151                                                   scratch1);
152  __ bind(&done);
153  __ DecrementCounter(counters->negative_lookups_miss(), 1);
154}
155
156
157void StubCache::GenerateProbe(MacroAssembler* masm,
158                              Code::Flags flags,
159                              Register receiver,
160                              Register name,
161                              Register scratch,
162                              Register extra,
163                              Register extra2,
164                              Register extra3) {
165  Isolate* isolate = masm->isolate();
166  Label miss;
167  USE(extra);   // The register extra is not used on the X64 platform.
168  USE(extra2);  // The register extra2 is not used on the X64 platform.
169  USE(extra3);  // The register extra2 is not used on the X64 platform.
170  // Make sure that code is valid. The multiplying code relies on the
171  // entry size being 3 * kPointerSize.
172  ASSERT(sizeof(Entry) == 3 * kPointerSize);
173
174  // Make sure the flags do not name a specific type.
175  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
176
177  // Make sure that there are no register conflicts.
178  ASSERT(!scratch.is(receiver));
179  ASSERT(!scratch.is(name));
180
181  // Check scratch register is valid, extra and extra2 are unused.
182  ASSERT(!scratch.is(no_reg));
183  ASSERT(extra2.is(no_reg));
184  ASSERT(extra3.is(no_reg));
185
186  Counters* counters = masm->isolate()->counters();
187  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
188
189  // Check that the receiver isn't a smi.
190  __ JumpIfSmi(receiver, &miss);
191
192  // Get the map of the receiver and compute the hash.
193  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
194  // Use only the low 32 bits of the map pointer.
195  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
196  __ xor_(scratch, Immediate(flags));
197  // We mask out the last two bits because they are not part of the hash and
198  // they are always 01 for maps.  Also in the two 'and' instructions below.
199  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
200
201  // Probe the primary table.
202  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
203
204  // Primary miss: Compute hash for secondary probe.
205  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
206  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
207  __ xor_(scratch, Immediate(flags));
208  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
209  __ subl(scratch, name);
210  __ addl(scratch, Immediate(flags));
211  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
212
213  // Probe the secondary table.
214  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
215
216  // Cache miss: Fall-through and let caller handle the miss by
217  // entering the runtime system.
218  __ bind(&miss);
219  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
220}
221
222
223void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
224                                                       int index,
225                                                       Register prototype) {
226  // Load the global or builtins object from the current context.
227  __ movq(prototype,
228          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
229  // Load the native context from the global or builtins object.
230  __ movq(prototype,
231          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
232  // Load the function from the native context.
233  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
234  // Load the initial map.  The global functions all have initial maps.
235  __ movq(prototype,
236          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
237  // Load the prototype from the initial map.
238  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
239}
240
241
242void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
243    MacroAssembler* masm,
244    int index,
245    Register prototype,
246    Label* miss) {
247  Isolate* isolate = masm->isolate();
248  // Check we're still in the same context.
249  __ Move(prototype, isolate->global_object());
250  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)),
251          prototype);
252  __ j(not_equal, miss);
253  // Get the global function with the given index.
254  Handle<JSFunction> function(
255      JSFunction::cast(isolate->native_context()->get(index)));
256  // Load its initial map. The global functions all have initial maps.
257  __ Move(prototype, Handle<Map>(function->initial_map()));
258  // Load the prototype from the initial map.
259  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
260}
261
262
263void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
264                                           Register receiver,
265                                           Register scratch,
266                                           Label* miss_label) {
267  // Check that the receiver isn't a smi.
268  __ JumpIfSmi(receiver, miss_label);
269
270  // Check that the object is a JS array.
271  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
272  __ j(not_equal, miss_label);
273
274  // Load length directly from the JS array.
275  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
276  __ ret(0);
277}
278
279
280// Generate code to check if an object is a string.  If the object is
281// a string, the map's instance type is left in the scratch register.
282static void GenerateStringCheck(MacroAssembler* masm,
283                                Register receiver,
284                                Register scratch,
285                                Label* smi,
286                                Label* non_string_object) {
287  // Check that the object isn't a smi.
288  __ JumpIfSmi(receiver, smi);
289
290  // Check that the object is a string.
291  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
292  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
293  STATIC_ASSERT(kNotStringTag != 0);
294  __ testl(scratch, Immediate(kNotStringTag));
295  __ j(not_zero, non_string_object);
296}
297
298
299void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
300                                            Register receiver,
301                                            Register scratch1,
302                                            Register scratch2,
303                                            Label* miss) {
304  Label check_wrapper;
305
306  // Check if the object is a string leaving the instance type in the
307  // scratch register.
308  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
309
310  // Load length directly from the string.
311  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
312  __ ret(0);
313
314  // Check if the object is a JSValue wrapper.
315  __ bind(&check_wrapper);
316  __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
317  __ j(not_equal, miss);
318
319  // Check if the wrapped value is a string and load the length
320  // directly if it is.
321  __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
322  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
323  __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
324  __ ret(0);
325}
326
327
328void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
329                                                 Register receiver,
330                                                 Register result,
331                                                 Register scratch,
332                                                 Label* miss_label) {
333  __ TryGetFunctionPrototype(receiver, result, miss_label);
334  if (!result.is(rax)) __ movq(rax, result);
335  __ ret(0);
336}
337
338
339void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
340                                            Register dst,
341                                            Register src,
342                                            bool inobject,
343                                            int index,
344                                            Representation representation) {
345  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
346  int offset = index * kPointerSize;
347  if (!inobject) {
348    // Calculate the offset into the properties array.
349    offset = offset + FixedArray::kHeaderSize;
350    __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
351    src = dst;
352  }
353  __ movq(dst, FieldOperand(src, offset));
354}
355
356
357static void PushInterceptorArguments(MacroAssembler* masm,
358                                     Register receiver,
359                                     Register holder,
360                                     Register name,
361                                     Handle<JSObject> holder_obj) {
362  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
363  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
364  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
365  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
366  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
367  __ push(name);
368  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
369  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
370  __ Move(kScratchRegister, interceptor);
371  __ push(kScratchRegister);
372  __ push(receiver);
373  __ push(holder);
374}
375
376
377static void CompileCallLoadPropertyWithInterceptor(
378    MacroAssembler* masm,
379    Register receiver,
380    Register holder,
381    Register name,
382    Handle<JSObject> holder_obj,
383    IC::UtilityId id) {
384  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
385  __ CallExternalReference(
386      ExternalReference(IC_Utility(id), masm->isolate()),
387      StubCache::kInterceptorArgsLength);
388}
389
390
391// Number of pointers to be reserved on stack for fast API call.
392static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
393
394
395// Reserves space for the extra arguments to API function in the
396// caller's frame.
397//
398// These arguments are set by CheckPrototypes and GenerateFastApiCall.
399static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
400  // ----------- S t a t e -------------
401  //  -- rsp[0] : return address
402  //  -- rsp[8] : last argument in the internal frame of the caller
403  // -----------------------------------
404  __ movq(scratch, StackOperandForReturnAddress(0));
405  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
406  __ movq(StackOperandForReturnAddress(0), scratch);
407  __ Move(scratch, Smi::FromInt(0));
408  StackArgumentsAccessor args(rsp, kFastApiCallArguments,
409                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
410  for (int i = 0; i < kFastApiCallArguments; i++) {
411     __ movq(args.GetArgumentOperand(i), scratch);
412  }
413}
414
415
416// Undoes the effects of ReserveSpaceForFastApiCall.
417static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
418  // ----------- S t a t e -------------
419  //  -- rsp[0]                             : return address.
420  //  -- rsp[8]                             : last fast api call extra argument.
421  //  -- ...
422  //  -- rsp[kFastApiCallArguments * 8]     : first fast api call extra
423  //                                          argument.
424  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
425  //                                          frame.
426  // -----------------------------------
427  __ movq(scratch, StackOperandForReturnAddress(0));
428  __ movq(StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize),
429          scratch);
430  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
431}
432
433
434static void GenerateFastApiCallBody(MacroAssembler* masm,
435                                    const CallOptimization& optimization,
436                                    int argc,
437                                    bool restore_context);
438
439
440// Generates call to API function.
441static void GenerateFastApiCall(MacroAssembler* masm,
442                                const CallOptimization& optimization,
443                                int argc) {
444  typedef FunctionCallbackArguments FCA;
445  StackArgumentsAccessor args(rsp, argc + kFastApiCallArguments);
446
447  // Save calling context.
448  int offset = argc + kFastApiCallArguments;
449  __ movq(args.GetArgumentOperand(offset - FCA::kContextSaveIndex), rsi);
450
451  // Get the function and setup the context.
452  Handle<JSFunction> function = optimization.constant_function();
453  __ Move(rdi, function);
454  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
455  // Construct the FunctionCallbackInfo on the stack.
456  __ movq(args.GetArgumentOperand(offset - FCA::kCalleeIndex), rdi);
457  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
458  Handle<Object> call_data(api_call_info->data(), masm->isolate());
459  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
460    __ Move(rcx, api_call_info);
461    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
462    __ movq(args.GetArgumentOperand(offset - FCA::kDataIndex), rbx);
463  } else {
464    __ Move(args.GetArgumentOperand(offset - FCA::kDataIndex), call_data);
465  }
466  __ Move(kScratchRegister,
467          ExternalReference::isolate_address(masm->isolate()));
468  __ movq(args.GetArgumentOperand(offset - FCA::kIsolateIndex),
469          kScratchRegister);
470  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
471  __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueDefaultValueIndex),
472          kScratchRegister);
473  __ movq(args.GetArgumentOperand(offset - FCA::kReturnValueOffset),
474          kScratchRegister);
475
476  // Prepare arguments.
477  STATIC_ASSERT(kFastApiCallArguments == 7);
478  __ lea(rax, Operand(rsp, 1 * kPointerSize));
479
480  GenerateFastApiCallBody(masm, optimization, argc, false);
481}
482
483
484// Generate call to api function.
485// This function uses push() to generate smaller, faster code than
486// the version above. It is an optimization that should will be removed
487// when api call ICs are generated in hydrogen.
488static void GenerateFastApiCall(MacroAssembler* masm,
489                                const CallOptimization& optimization,
490                                Register receiver,
491                                Register scratch1,
492                                Register scratch2,
493                                Register scratch3,
494                                int argc,
495                                Register* values) {
496  ASSERT(optimization.is_simple_api_call());
497
498  // Copy return value.
499  __ pop(scratch1);
500
501  // receiver
502  __ push(receiver);
503
504  // Write the arguments to stack frame.
505  for (int i = 0; i < argc; i++) {
506    Register arg = values[argc-1-i];
507    ASSERT(!receiver.is(arg));
508    ASSERT(!scratch1.is(arg));
509    ASSERT(!scratch2.is(arg));
510    ASSERT(!scratch3.is(arg));
511    __ push(arg);
512  }
513
514  typedef FunctionCallbackArguments FCA;
515
516  STATIC_ASSERT(FCA::kHolderIndex == 0);
517  STATIC_ASSERT(FCA::kIsolateIndex == 1);
518  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
519  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
520  STATIC_ASSERT(FCA::kDataIndex == 4);
521  STATIC_ASSERT(FCA::kCalleeIndex == 5);
522  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
523  STATIC_ASSERT(FCA::kArgsLength == 7);
524
525  // context save
526  __ push(rsi);
527
528  // Get the function and setup the context.
529  Handle<JSFunction> function = optimization.constant_function();
530  __ Move(scratch2, function);
531  __ push(scratch2);
532
533  Isolate* isolate = masm->isolate();
534  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
535  Handle<Object> call_data(api_call_info->data(), isolate);
536  // Push data from ExecutableAccessorInfo.
537  bool call_data_undefined = false;
538  if (isolate->heap()->InNewSpace(*call_data)) {
539    __ Move(scratch2, api_call_info);
540    __ movq(scratch3, FieldOperand(scratch2, CallHandlerInfo::kDataOffset));
541  } else if (call_data->IsUndefined()) {
542    call_data_undefined = true;
543    __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex);
544  } else {
545    __ Move(scratch3, call_data);
546  }
547  // call data
548  __ push(scratch3);
549  if (!call_data_undefined) {
550    __ LoadRoot(scratch3, Heap::kUndefinedValueRootIndex);
551  }
552  // return value
553  __ push(scratch3);
554  // return value default
555  __ push(scratch3);
556  // isolate
557  __ Move(scratch3,
558          ExternalReference::isolate_address(masm->isolate()));
559  __ push(scratch3);
560  // holder
561  __ push(receiver);
562
563  ASSERT(!scratch1.is(rax));
564  // store receiver address for GenerateFastApiCallBody
565  __ movq(rax, rsp);
566
567  // return address
568  __ push(scratch1);
569
570  GenerateFastApiCallBody(masm, optimization, argc, true);
571}
572
573
574static void GenerateFastApiCallBody(MacroAssembler* masm,
575                                    const CallOptimization& optimization,
576                                    int argc,
577                                    bool restore_context) {
578  // ----------- S t a t e -------------
579  //  -- rsp[0]              : return address
580  //  -- rsp[8] - rsp[56]    : FunctionCallbackInfo, incl.
581  //                         :  object passing the type check
582  //                            (set by CheckPrototypes)
583  //  -- rsp[64]             : last argument
584  //  -- ...
585  //  -- rsp[(argc + 7) * 8] : first argument
586  //  -- rsp[(argc + 8) * 8] : receiver
587  //
588  // rax : receiver address
589  // -----------------------------------
590  typedef FunctionCallbackArguments FCA;
591
592  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
593  // Function address is a foreign pointer outside V8's heap.
594  Address function_address = v8::ToCData<Address>(api_call_info->callback());
595
596  // Allocate the v8::Arguments structure in the arguments' space since
597  // it's not controlled by GC.
598  const int kApiStackSpace = 4;
599
600  __ PrepareCallApiFunction(kApiStackSpace);
601
602  __ movq(StackSpaceOperand(0), rax);  // FunctionCallbackInfo::implicit_args_.
603  __ addq(rax, Immediate((argc + kFastApiCallArguments - 1) * kPointerSize));
604  __ movq(StackSpaceOperand(1), rax);  // FunctionCallbackInfo::values_.
605  __ Set(StackSpaceOperand(2), argc);  // FunctionCallbackInfo::length_.
606  // FunctionCallbackInfo::is_construct_call_.
607  __ Set(StackSpaceOperand(3), 0);
608
609#if defined(__MINGW64__) || defined(_WIN64)
610  Register arguments_arg = rcx;
611  Register callback_arg = rdx;
612#else
613  Register arguments_arg = rdi;
614  Register callback_arg = rsi;
615#endif
616
617  // v8::InvocationCallback's argument.
618  __ lea(arguments_arg, StackSpaceOperand(0));
619
620  Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
621
622  StackArgumentsAccessor args_from_rbp(rbp, kFastApiCallArguments,
623                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
624  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
625      kFastApiCallArguments - 1 - FCA::kContextSaveIndex);
626  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
627      kFastApiCallArguments - 1 - FCA::kReturnValueOffset);
628  __ CallApiFunctionAndReturn(
629      function_address,
630      thunk_address,
631      callback_arg,
632      argc + kFastApiCallArguments + 1,
633      return_value_operand,
634      restore_context ? &context_restore_operand : NULL);
635}
636
637
638class CallInterceptorCompiler BASE_EMBEDDED {
639 public:
640  CallInterceptorCompiler(CallStubCompiler* stub_compiler,
641                          const ParameterCount& arguments,
642                          Register name,
643                          ExtraICState extra_ic_state)
644      : stub_compiler_(stub_compiler),
645        arguments_(arguments),
646        name_(name),
647        extra_ic_state_(extra_ic_state) {}
648
649  void Compile(MacroAssembler* masm,
650               Handle<JSObject> object,
651               Handle<JSObject> holder,
652               Handle<Name> name,
653               LookupResult* lookup,
654               Register receiver,
655               Register scratch1,
656               Register scratch2,
657               Register scratch3,
658               Label* miss) {
659    ASSERT(holder->HasNamedInterceptor());
660    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
661
662    // Check that the receiver isn't a smi.
663    __ JumpIfSmi(receiver, miss);
664
665    CallOptimization optimization(lookup);
666    if (optimization.is_constant_call()) {
667      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
668                       holder, lookup, name, optimization, miss);
669    } else {
670      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
671                     name, holder, miss);
672    }
673  }
674
675 private:
676  void CompileCacheable(MacroAssembler* masm,
677                        Handle<JSObject> object,
678                        Register receiver,
679                        Register scratch1,
680                        Register scratch2,
681                        Register scratch3,
682                        Handle<JSObject> interceptor_holder,
683                        LookupResult* lookup,
684                        Handle<Name> name,
685                        const CallOptimization& optimization,
686                        Label* miss_label) {
687    ASSERT(optimization.is_constant_call());
688    ASSERT(!lookup->holder()->IsGlobalObject());
689
690    int depth1 = kInvalidProtoDepth;
691    int depth2 = kInvalidProtoDepth;
692    bool can_do_fast_api_call = false;
693    if (optimization.is_simple_api_call() &&
694        !lookup->holder()->IsGlobalObject()) {
695      depth1 = optimization.GetPrototypeDepthOfExpectedType(
696          object, interceptor_holder);
697      if (depth1 == kInvalidProtoDepth) {
698        depth2 = optimization.GetPrototypeDepthOfExpectedType(
699            interceptor_holder, Handle<JSObject>(lookup->holder()));
700      }
701      can_do_fast_api_call =
702          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
703    }
704
705    Counters* counters = masm->isolate()->counters();
706    __ IncrementCounter(counters->call_const_interceptor(), 1);
707
708    if (can_do_fast_api_call) {
709      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
710      ReserveSpaceForFastApiCall(masm, scratch1);
711    }
712
713    // Check that the maps from receiver to interceptor's holder
714    // haven't changed and thus we can invoke interceptor.
715    Label miss_cleanup;
716    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
717    Register holder =
718        stub_compiler_->CheckPrototypes(
719            IC::CurrentTypeOf(object, masm->isolate()), receiver,
720            interceptor_holder, scratch1, scratch2, scratch3,
721            name, depth1, miss);
722
723    // Invoke an interceptor and if it provides a value,
724    // branch to |regular_invoke|.
725    Label regular_invoke;
726    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
727                        &regular_invoke);
728
729    // Interceptor returned nothing for this property.  Try to use cached
730    // constant function.
731
732    // Check that the maps from interceptor's holder to constant function's
733    // holder haven't changed and thus we can use cached constant function.
734    if (*interceptor_holder != lookup->holder()) {
735      stub_compiler_->CheckPrototypes(
736          IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder,
737          handle(lookup->holder()), scratch1, scratch2, scratch3,
738          name, depth2, miss);
739    } else {
740      // CheckPrototypes has a side effect of fetching a 'holder'
741      // for API (object which is instanceof for the signature).  It's
742      // safe to omit it here, as if present, it should be fetched
743      // by the previous CheckPrototypes.
744      ASSERT(depth2 == kInvalidProtoDepth);
745    }
746
747    // Invoke function.
748    if (can_do_fast_api_call) {
749      GenerateFastApiCall(masm, optimization, arguments_.immediate());
750    } else {
751      Handle<JSFunction> fun = optimization.constant_function();
752      stub_compiler_->GenerateJumpFunction(object, fun);
753    }
754
755    // Deferred code for fast API call case---clean preallocated space.
756    if (can_do_fast_api_call) {
757      __ bind(&miss_cleanup);
758      FreeSpaceForFastApiCall(masm, scratch1);
759      __ jmp(miss_label);
760    }
761
762    // Invoke a regular function.
763    __ bind(&regular_invoke);
764    if (can_do_fast_api_call) {
765      FreeSpaceForFastApiCall(masm, scratch1);
766    }
767  }
768
769  void CompileRegular(MacroAssembler* masm,
770                      Handle<JSObject> object,
771                      Register receiver,
772                      Register scratch1,
773                      Register scratch2,
774                      Register scratch3,
775                      Handle<Name> name,
776                      Handle<JSObject> interceptor_holder,
777                      Label* miss_label) {
778    Register holder =
779        stub_compiler_->CheckPrototypes(
780            IC::CurrentTypeOf(object, masm->isolate()), receiver,
781            interceptor_holder, scratch1, scratch2, scratch3, name, miss_label);
782
783    FrameScope scope(masm, StackFrame::INTERNAL);
784    // Save the name_ register across the call.
785    __ push(name_);
786
787    CompileCallLoadPropertyWithInterceptor(
788        masm, receiver, holder, name_, interceptor_holder,
789        IC::kLoadPropertyWithInterceptorForCall);
790
791    // Restore the name_ register.
792    __ pop(name_);
793
794    // Leave the internal frame.
795  }
796
797  void LoadWithInterceptor(MacroAssembler* masm,
798                           Register receiver,
799                           Register holder,
800                           Handle<JSObject> holder_obj,
801                           Label* interceptor_succeeded) {
802    {
803      FrameScope scope(masm, StackFrame::INTERNAL);
804      __ push(receiver);
805      __ push(holder);
806      __ push(name_);
807
808      CompileCallLoadPropertyWithInterceptor(
809          masm, receiver, holder, name_, holder_obj,
810          IC::kLoadPropertyWithInterceptorOnly);
811
812      __ pop(name_);
813      __ pop(holder);
814      __ pop(receiver);
815      // Leave the internal frame.
816    }
817
818    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
819    __ j(not_equal, interceptor_succeeded);
820  }
821
822  CallStubCompiler* stub_compiler_;
823  const ParameterCount& arguments_;
824  Register name_;
825  ExtraICState extra_ic_state_;
826};
827
828
829void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
830                                            Label* label,
831                                            Handle<Name> name) {
832  if (!label->is_unused()) {
833    __ bind(label);
834    __ Move(this->name(), name);
835  }
836}
837
838
839void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
840                                             Handle<JSGlobalObject> global,
841                                             Handle<Name> name,
842                                             Register scratch,
843                                             Label* miss) {
844  Handle<PropertyCell> cell =
845      JSGlobalObject::EnsurePropertyCell(global, name);
846  ASSERT(cell->value()->IsTheHole());
847  __ Move(scratch, cell);
848  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
849         masm->isolate()->factory()->the_hole_value());
850  __ j(not_equal, miss);
851}
852
853
854void StoreStubCompiler::GenerateNegativeHolderLookup(
855    MacroAssembler* masm,
856    Handle<JSObject> holder,
857    Register holder_reg,
858    Handle<Name> name,
859    Label* miss) {
860  if (holder->IsJSGlobalObject()) {
861    GenerateCheckPropertyCell(
862        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
863  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
864    GenerateDictionaryNegativeLookup(
865        masm, miss, holder_reg, name, scratch1(), scratch2());
866  }
867}
868
869
870// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
871// store is successful.
872void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
873                                                Handle<JSObject> object,
874                                                LookupResult* lookup,
875                                                Handle<Map> transition,
876                                                Handle<Name> name,
877                                                Register receiver_reg,
878                                                Register storage_reg,
879                                                Register value_reg,
880                                                Register scratch1,
881                                                Register scratch2,
882                                                Register unused,
883                                                Label* miss_label,
884                                                Label* slow) {
885  int descriptor = transition->LastAdded();
886  DescriptorArray* descriptors = transition->instance_descriptors();
887  PropertyDetails details = descriptors->GetDetails(descriptor);
888  Representation representation = details.representation();
889  ASSERT(!representation.IsNone());
890
891  if (details.type() == CONSTANT) {
892    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
893    __ Cmp(value_reg, constant);
894    __ j(not_equal, miss_label);
895  } else if (FLAG_track_fields && representation.IsSmi()) {
896    __ JumpIfNotSmi(value_reg, miss_label);
897  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
898    __ JumpIfSmi(value_reg, miss_label);
899  } else if (FLAG_track_double_fields && representation.IsDouble()) {
900    Label do_store, heap_number;
901    __ AllocateHeapNumber(storage_reg, scratch1, slow);
902
903    __ JumpIfNotSmi(value_reg, &heap_number);
904    __ SmiToInteger32(scratch1, value_reg);
905    __ Cvtlsi2sd(xmm0, scratch1);
906    __ jmp(&do_store);
907
908    __ bind(&heap_number);
909    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
910                miss_label, DONT_DO_SMI_CHECK);
911    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
912
913    __ bind(&do_store);
914    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
915  }
916
917  // Stub never generated for non-global objects that require access
918  // checks.
919  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
920
921  // Perform map transition for the receiver if necessary.
922  if (details.type() == FIELD &&
923      object->map()->unused_property_fields() == 0) {
924    // The properties must be extended before we can store the value.
925    // We jump to a runtime call that extends the properties array.
926    __ PopReturnAddressTo(scratch1);
927    __ push(receiver_reg);
928    __ Push(transition);
929    __ push(value_reg);
930    __ PushReturnAddressFrom(scratch1);
931    __ TailCallExternalReference(
932        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
933                          masm->isolate()),
934        3,
935        1);
936    return;
937  }
938
939  // Update the map of the object.
940  __ Move(scratch1, transition);
941  __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
942
943  // Update the write barrier for the map field.
944  __ RecordWriteField(receiver_reg,
945                      HeapObject::kMapOffset,
946                      scratch1,
947                      scratch2,
948                      kDontSaveFPRegs,
949                      OMIT_REMEMBERED_SET,
950                      OMIT_SMI_CHECK);
951
952  if (details.type() == CONSTANT) {
953    ASSERT(value_reg.is(rax));
954    __ ret(0);
955    return;
956  }
957
958  int index = transition->instance_descriptors()->GetFieldIndex(
959      transition->LastAdded());
960
961  // Adjust for the number of properties stored in the object. Even in the
962  // face of a transition we can use the old map here because the size of the
963  // object and the number of in-object properties is not going to change.
964  index -= object->map()->inobject_properties();
965
966  // TODO(verwaest): Share this code as a code stub.
967  SmiCheck smi_check = representation.IsTagged()
968      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
969  if (index < 0) {
970    // Set the property straight into the object.
971    int offset = object->map()->instance_size() + (index * kPointerSize);
972    if (FLAG_track_double_fields && representation.IsDouble()) {
973      __ movq(FieldOperand(receiver_reg, offset), storage_reg);
974    } else {
975      __ movq(FieldOperand(receiver_reg, offset), value_reg);
976    }
977
978    if (!FLAG_track_fields || !representation.IsSmi()) {
979      // Update the write barrier for the array address.
980      if (!FLAG_track_double_fields || !representation.IsDouble()) {
981        __ movq(storage_reg, value_reg);
982      }
983      __ RecordWriteField(
984          receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
985          EMIT_REMEMBERED_SET, smi_check);
986    }
987  } else {
988    // Write to the properties array.
989    int offset = index * kPointerSize + FixedArray::kHeaderSize;
990    // Get the properties array (optimistically).
991    __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
992    if (FLAG_track_double_fields && representation.IsDouble()) {
993      __ movq(FieldOperand(scratch1, offset), storage_reg);
994    } else {
995      __ movq(FieldOperand(scratch1, offset), value_reg);
996    }
997
998    if (!FLAG_track_fields || !representation.IsSmi()) {
999      // Update the write barrier for the array address.
1000      if (!FLAG_track_double_fields || !representation.IsDouble()) {
1001        __ movq(storage_reg, value_reg);
1002      }
1003      __ RecordWriteField(
1004          scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
1005          EMIT_REMEMBERED_SET, smi_check);
1006    }
1007  }
1008
1009  // Return the value (register rax).
1010  ASSERT(value_reg.is(rax));
1011  __ ret(0);
1012}
1013
1014
1015// Both name_reg and receiver_reg are preserved on jumps to miss_label,
1016// but may be destroyed if store is successful.
1017void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
1018                                           Handle<JSObject> object,
1019                                           LookupResult* lookup,
1020                                           Register receiver_reg,
1021                                           Register name_reg,
1022                                           Register value_reg,
1023                                           Register scratch1,
1024                                           Register scratch2,
1025                                           Label* miss_label) {
1026  // Stub never generated for non-global objects that require access
1027  // checks.
1028  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1029
1030  int index = lookup->GetFieldIndex().field_index();
1031
1032  // Adjust for the number of properties stored in the object. Even in the
1033  // face of a transition we can use the old map here because the size of the
1034  // object and the number of in-object properties is not going to change.
1035  index -= object->map()->inobject_properties();
1036
1037  Representation representation = lookup->representation();
1038  ASSERT(!representation.IsNone());
1039  if (FLAG_track_fields && representation.IsSmi()) {
1040    __ JumpIfNotSmi(value_reg, miss_label);
1041  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
1042    __ JumpIfSmi(value_reg, miss_label);
1043  } else if (FLAG_track_double_fields && representation.IsDouble()) {
1044    // Load the double storage.
1045    if (index < 0) {
1046      int offset = object->map()->instance_size() + (index * kPointerSize);
1047      __ movq(scratch1, FieldOperand(receiver_reg, offset));
1048    } else {
1049      __ movq(scratch1,
1050              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
1051      int offset = index * kPointerSize + FixedArray::kHeaderSize;
1052      __ movq(scratch1, FieldOperand(scratch1, offset));
1053    }
1054
1055    // Store the value into the storage.
1056    Label do_store, heap_number;
1057    __ JumpIfNotSmi(value_reg, &heap_number);
1058    __ SmiToInteger32(scratch2, value_reg);
1059    __ Cvtlsi2sd(xmm0, scratch2);
1060    __ jmp(&do_store);
1061
1062    __ bind(&heap_number);
1063    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
1064                miss_label, DONT_DO_SMI_CHECK);
1065    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
1066    __ bind(&do_store);
1067    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
1068    // Return the value (register rax).
1069    ASSERT(value_reg.is(rax));
1070    __ ret(0);
1071    return;
1072  }
1073
1074  // TODO(verwaest): Share this code as a code stub.
1075  SmiCheck smi_check = representation.IsTagged()
1076      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
1077  if (index < 0) {
1078    // Set the property straight into the object.
1079    int offset = object->map()->instance_size() + (index * kPointerSize);
1080    __ movq(FieldOperand(receiver_reg, offset), value_reg);
1081
1082    if (!FLAG_track_fields || !representation.IsSmi()) {
1083      // Update the write barrier for the array address.
1084      // Pass the value being stored in the now unused name_reg.
1085      __ movq(name_reg, value_reg);
1086      __ RecordWriteField(
1087          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
1088          EMIT_REMEMBERED_SET, smi_check);
1089    }
1090  } else {
1091    // Write to the properties array.
1092    int offset = index * kPointerSize + FixedArray::kHeaderSize;
1093    // Get the properties array (optimistically).
1094    __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
1095    __ movq(FieldOperand(scratch1, offset), value_reg);
1096
1097    if (!FLAG_track_fields || !representation.IsSmi()) {
1098      // Update the write barrier for the array address.
1099      // Pass the value being stored in the now unused name_reg.
1100      __ movq(name_reg, value_reg);
1101      __ RecordWriteField(
1102          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
1103          EMIT_REMEMBERED_SET, smi_check);
1104    }
1105  }
1106
1107  // Return the value (register rax).
1108  ASSERT(value_reg.is(rax));
1109  __ ret(0);
1110}
1111
1112
1113void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
1114  __ jmp(code, RelocInfo::CODE_TARGET);
1115}
1116
1117
1118#undef __
1119#define __ ACCESS_MASM((masm()))
1120
1121
1122Register StubCompiler::CheckPrototypes(Handle<Type> type,
1123                                       Register object_reg,
1124                                       Handle<JSObject> holder,
1125                                       Register holder_reg,
1126                                       Register scratch1,
1127                                       Register scratch2,
1128                                       Handle<Name> name,
1129                                       int save_at_depth,
1130                                       Label* miss,
1131                                       PrototypeCheckType check) {
1132  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
1133  // Make sure that the type feedback oracle harvests the receiver map.
1134  // TODO(svenpanne) Remove this hack when all ICs are reworked.
1135  __ Move(scratch1, receiver_map);
1136
1137  // Make sure there's no overlap between holder and object registers.
1138  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1139  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1140         && !scratch2.is(scratch1));
1141
1142  // Keep track of the current object in register reg.  On the first
1143  // iteration, reg is an alias for object_reg, on later iterations,
1144  // it is an alias for holder_reg.
1145  Register reg = object_reg;
1146  int depth = 0;
1147
1148  StackArgumentsAccessor args(rsp, kFastApiCallArguments,
1149                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
1150  const int kHolderIndex = kFastApiCallArguments - 1 -
1151      FunctionCallbackArguments::kHolderIndex;
1152
1153  if (save_at_depth == depth) {
1154    __ movq(args.GetArgumentOperand(kHolderIndex), object_reg);
1155  }
1156
1157  Handle<JSObject> current = Handle<JSObject>::null();
1158  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
1159  Handle<JSObject> prototype = Handle<JSObject>::null();
1160  Handle<Map> current_map = receiver_map;
1161  Handle<Map> holder_map(holder->map());
1162  // Traverse the prototype chain and check the maps in the prototype chain for
1163  // fast and global objects or do negative lookup for normal objects.
1164  while (!current_map.is_identical_to(holder_map)) {
1165    ++depth;
1166
1167    // Only global objects and objects that do not require access
1168    // checks are allowed in stubs.
1169    ASSERT(current_map->IsJSGlobalProxyMap() ||
1170           !current_map->is_access_check_needed());
1171
1172    prototype = handle(JSObject::cast(current_map->prototype()));
1173    if (current_map->is_dictionary_map() &&
1174        !current_map->IsJSGlobalObjectMap() &&
1175        !current_map->IsJSGlobalProxyMap()) {
1176      if (!name->IsUniqueName()) {
1177        ASSERT(name->IsString());
1178        name = factory()->InternalizeString(Handle<String>::cast(name));
1179      }
1180      ASSERT(current.is_null() ||
1181             current->property_dictionary()->FindEntry(*name) ==
1182             NameDictionary::kNotFound);
1183
1184      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
1185                                       scratch1, scratch2);
1186
1187      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
1188      reg = holder_reg;  // From now on the object will be in holder_reg.
1189      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
1190    } else {
1191      bool in_new_space = heap()->InNewSpace(*prototype);
1192      if (in_new_space) {
1193        // Save the map in scratch1 for later.
1194        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
1195      }
1196      if (depth != 1 || check == CHECK_ALL_MAPS) {
1197        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
1198      }
1199
1200      // Check access rights to the global object.  This has to happen after
1201      // the map check so that we know that the object is actually a global
1202      // object.
1203      if (current_map->IsJSGlobalProxyMap()) {
1204        __ CheckAccessGlobalProxy(reg, scratch2, miss);
1205      } else if (current_map->IsJSGlobalObjectMap()) {
1206        GenerateCheckPropertyCell(
1207            masm(), Handle<JSGlobalObject>::cast(current), name,
1208            scratch2, miss);
1209      }
1210      reg = holder_reg;  // From now on the object will be in holder_reg.
1211
1212      if (in_new_space) {
1213        // The prototype is in new space; we cannot store a reference to it
1214        // in the code.  Load it from the map.
1215        __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
1216      } else {
1217        // The prototype is in old space; load it directly.
1218        __ Move(reg, prototype);
1219      }
1220    }
1221
1222    if (save_at_depth == depth) {
1223      __ movq(args.GetArgumentOperand(kHolderIndex), reg);
1224    }
1225
1226    // Go to the next object in the prototype chain.
1227    current = prototype;
1228    current_map = handle(current->map());
1229  }
1230
1231  // Log the check depth.
1232  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
1233
1234  if (depth != 0 || check == CHECK_ALL_MAPS) {
1235    // Check the holder map.
1236    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
1237  }
1238
1239  // Perform security check for access to the global object.
1240  ASSERT(current_map->IsJSGlobalProxyMap() ||
1241         !current_map->is_access_check_needed());
1242  if (current_map->IsJSGlobalProxyMap()) {
1243    __ CheckAccessGlobalProxy(reg, scratch1, miss);
1244  }
1245
1246  // Return the register containing the holder.
1247  return reg;
1248}
1249
1250
1251void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
1252  if (!miss->is_unused()) {
1253    Label success;
1254    __ jmp(&success);
1255    __ bind(miss);
1256    TailCallBuiltin(masm(), MissBuiltin(kind()));
1257    __ bind(&success);
1258  }
1259}
1260
1261
1262void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
1263  if (!miss->is_unused()) {
1264    Label success;
1265    __ jmp(&success);
1266    GenerateRestoreName(masm(), miss, name);
1267    TailCallBuiltin(masm(), MissBuiltin(kind()));
1268    __ bind(&success);
1269  }
1270}
1271
1272
1273Register LoadStubCompiler::CallbackHandlerFrontend(
1274    Handle<Type> type,
1275    Register object_reg,
1276    Handle<JSObject> holder,
1277    Handle<Name> name,
1278    Handle<Object> callback) {
1279  Label miss;
1280
1281  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
1282
1283  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
1284    ASSERT(!reg.is(scratch2()));
1285    ASSERT(!reg.is(scratch3()));
1286    ASSERT(!reg.is(scratch4()));
1287
1288    // Load the properties dictionary.
1289    Register dictionary = scratch4();
1290    __ movq(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
1291
1292    // Probe the dictionary.
1293    Label probe_done;
1294    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
1295                                                     &miss,
1296                                                     &probe_done,
1297                                                     dictionary,
1298                                                     this->name(),
1299                                                     scratch2(),
1300                                                     scratch3());
1301    __ bind(&probe_done);
1302
1303    // If probing finds an entry in the dictionary, scratch3 contains the
1304    // index into the dictionary. Check that the value is the callback.
1305    Register index = scratch3();
1306    const int kElementsStartOffset =
1307        NameDictionary::kHeaderSize +
1308        NameDictionary::kElementsStartIndex * kPointerSize;
1309    const int kValueOffset = kElementsStartOffset + kPointerSize;
1310    __ movq(scratch2(),
1311            Operand(dictionary, index, times_pointer_size,
1312                    kValueOffset - kHeapObjectTag));
1313    __ movq(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
1314    __ cmpq(scratch2(), scratch3());
1315    __ j(not_equal, &miss);
1316  }
1317
1318  HandlerFrontendFooter(name, &miss);
1319  return reg;
1320}
1321
1322
1323void LoadStubCompiler::GenerateLoadField(Register reg,
1324                                         Handle<JSObject> holder,
1325                                         PropertyIndex field,
1326                                         Representation representation) {
1327  if (!reg.is(receiver())) __ movq(receiver(), reg);
1328  if (kind() == Code::LOAD_IC) {
1329    LoadFieldStub stub(field.is_inobject(holder),
1330                       field.translate(holder),
1331                       representation);
1332    GenerateTailCall(masm(), stub.GetCode(isolate()));
1333  } else {
1334    KeyedLoadFieldStub stub(field.is_inobject(holder),
1335                            field.translate(holder),
1336                            representation);
1337    GenerateTailCall(masm(), stub.GetCode(isolate()));
1338  }
1339}
1340
1341
1342void LoadStubCompiler::GenerateLoadCallback(
1343    const CallOptimization& call_optimization) {
1344  GenerateFastApiCall(
1345      masm(), call_optimization, receiver(),
1346      scratch1(), scratch2(), name(), 0, NULL);
1347}
1348
1349
1350void LoadStubCompiler::GenerateLoadCallback(
1351    Register reg,
1352    Handle<ExecutableAccessorInfo> callback) {
1353  // Insert additional parameters into the stack frame above return address.
1354  ASSERT(!scratch4().is(reg));
1355  __ PopReturnAddressTo(scratch4());
1356
1357  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1358  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1359  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1360  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1361  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1362  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
1363  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
1364  __ push(receiver());  // receiver
1365  if (heap()->InNewSpace(callback->data())) {
1366    ASSERT(!scratch2().is(reg));
1367    __ Move(scratch2(), callback);
1368    __ push(FieldOperand(scratch2(),
1369                         ExecutableAccessorInfo::kDataOffset));  // data
1370  } else {
1371    __ Push(Handle<Object>(callback->data(), isolate()));
1372  }
1373  ASSERT(!kScratchRegister.is(reg));
1374  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1375  __ push(kScratchRegister);  // return value
1376  __ push(kScratchRegister);  // return value default
1377  __ PushAddress(ExternalReference::isolate_address(isolate()));
1378  __ push(reg);  // holder
1379  __ push(name());  // name
1380  // Save a pointer to where we pushed the arguments pointer.  This will be
1381  // passed as the const PropertyAccessorInfo& to the C++ callback.
1382
1383  Address getter_address = v8::ToCData<Address>(callback->getter());
1384
1385#if defined(__MINGW64__) || defined(_WIN64)
1386  Register getter_arg = r8;
1387  Register accessor_info_arg = rdx;
1388  Register name_arg = rcx;
1389#else
1390  Register getter_arg = rdx;
1391  Register accessor_info_arg = rsi;
1392  Register name_arg = rdi;
1393#endif
1394
1395  ASSERT(!name_arg.is(scratch4()));
1396  __ movq(name_arg, rsp);
1397  __ PushReturnAddressFrom(scratch4());
1398
1399  // v8::Arguments::values_ and handler for name.
1400  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
1401
1402  // Allocate v8::AccessorInfo in non-GCed stack space.
1403  const int kArgStackSpace = 1;
1404
1405  __ PrepareCallApiFunction(kArgStackSpace);
1406  __ lea(rax, Operand(name_arg, 1 * kPointerSize));
1407
1408  // v8::PropertyAccessorInfo::args_.
1409  __ movq(StackSpaceOperand(0), rax);
1410
1411  // The context register (rsi) has been saved in PrepareCallApiFunction and
1412  // could be used to pass arguments.
1413  __ lea(accessor_info_arg, StackSpaceOperand(0));
1414
1415  Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1416
1417  // The name handler is counted as an argument.
1418  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
1419  Operand return_value_operand = args.GetArgumentOperand(
1420      PropertyCallbackArguments::kArgsLength - 1 -
1421      PropertyCallbackArguments::kReturnValueOffset);
1422  __ CallApiFunctionAndReturn(getter_address,
1423                              thunk_address,
1424                              getter_arg,
1425                              kStackSpace,
1426                              return_value_operand,
1427                              NULL);
1428}
1429
1430
1431void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1432  // Return the constant value.
1433  __ Move(rax, value);
1434  __ ret(0);
1435}
1436
1437
1438void LoadStubCompiler::GenerateLoadInterceptor(
1439    Register holder_reg,
1440    Handle<Object> object,
1441    Handle<JSObject> interceptor_holder,
1442    LookupResult* lookup,
1443    Handle<Name> name) {
1444  ASSERT(interceptor_holder->HasNamedInterceptor());
1445  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1446
1447  // So far the most popular follow ups for interceptor loads are FIELD
1448  // and CALLBACKS, so inline only them, other cases may be added
1449  // later.
1450  bool compile_followup_inline = false;
1451  if (lookup->IsFound() && lookup->IsCacheable()) {
1452    if (lookup->IsField()) {
1453      compile_followup_inline = true;
1454    } else if (lookup->type() == CALLBACKS &&
1455               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1456      ExecutableAccessorInfo* callback =
1457          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1458      compile_followup_inline = callback->getter() != NULL &&
1459          callback->IsCompatibleReceiver(*object);
1460    }
1461  }
1462
1463  if (compile_followup_inline) {
1464    // Compile the interceptor call, followed by inline code to load the
1465    // property from further up the prototype chain if the call fails.
1466    // Check that the maps haven't changed.
1467    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1468
1469    // Preserve the receiver register explicitly whenever it is different from
1470    // the holder and it is needed should the interceptor return without any
1471    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1472    // the FIELD case might cause a miss during the prototype check.
1473    bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1474    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1475        (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1476
1477    // Save necessary data before invoking an interceptor.
1478    // Requires a frame to make GC aware of pushed pointers.
1479    {
1480      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1481
1482      if (must_preserve_receiver_reg) {
1483        __ push(receiver());
1484      }
1485      __ push(holder_reg);
1486      __ push(this->name());
1487
1488      // Invoke an interceptor.  Note: map checks from receiver to
1489      // interceptor's holder has been compiled before (see a caller
1490      // of this method.)
1491      CompileCallLoadPropertyWithInterceptor(
1492          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1493          IC::kLoadPropertyWithInterceptorOnly);
1494
1495      // Check if interceptor provided a value for property.  If it's
1496      // the case, return immediately.
1497      Label interceptor_failed;
1498      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
1499      __ j(equal, &interceptor_failed);
1500      frame_scope.GenerateLeaveFrame();
1501      __ ret(0);
1502
1503      __ bind(&interceptor_failed);
1504      __ pop(this->name());
1505      __ pop(holder_reg);
1506      if (must_preserve_receiver_reg) {
1507        __ pop(receiver());
1508      }
1509
1510      // Leave the internal frame.
1511    }
1512
1513    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1514  } else {  // !compile_followup_inline
1515    // Call the runtime system to load the interceptor.
1516    // Check that the maps haven't changed.
1517    __ PopReturnAddressTo(scratch2());
1518    PushInterceptorArguments(masm(), receiver(), holder_reg,
1519                             this->name(), interceptor_holder);
1520    __ PushReturnAddressFrom(scratch2());
1521
1522    ExternalReference ref = ExternalReference(
1523        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1524    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1525  }
1526}
1527
1528
1529void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
1530  if (kind_ == Code::KEYED_CALL_IC) {
1531    __ Cmp(rcx, name);
1532    __ j(not_equal, miss);
1533  }
1534}
1535
1536
1537void CallStubCompiler::GenerateFunctionCheck(Register function,
1538                                             Register scratch,
1539                                             Label* miss) {
1540  __ JumpIfSmi(function, miss);
1541  __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
1542  __ j(not_equal, miss);
1543}
1544
1545
1546void CallStubCompiler::GenerateLoadFunctionFromCell(
1547    Handle<Cell> cell,
1548    Handle<JSFunction> function,
1549    Label* miss) {
1550  // Get the value from the cell.
1551  __ Move(rdi, cell);
1552  __ movq(rdi, FieldOperand(rdi, Cell::kValueOffset));
1553
1554  // Check that the cell contains the same function.
1555  if (heap()->InNewSpace(*function)) {
1556    // We can't embed a pointer to a function in new space so we have
1557    // to verify that the shared function info is unchanged. This has
1558    // the nice side effect that multiple closures based on the same
1559    // function can all use this call IC. Before we load through the
1560    // function, we have to verify that it still is a function.
1561    GenerateFunctionCheck(rdi, rax, miss);
1562
1563    // Check the shared function info. Make sure it hasn't changed.
1564    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
1565    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
1566  } else {
1567    __ Cmp(rdi, function);
1568  }
1569  __ j(not_equal, miss);
1570}
1571
1572
1573void CallStubCompiler::GenerateMissBranch() {
1574  Handle<Code> code =
1575      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1576                                               kind_,
1577                                               extra_state());
1578  __ Jump(code, RelocInfo::CODE_TARGET);
1579}
1580
1581
1582Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1583                                                Handle<JSObject> holder,
1584                                                PropertyIndex index,
1585                                                Handle<Name> name) {
1586  Label miss;
1587
1588  Register reg = HandlerFrontendHeader(
1589      object, holder, name, RECEIVER_MAP_CHECK, &miss);
1590
1591  GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder),
1592                           index.translate(holder), Representation::Tagged());
1593  GenerateJumpFunction(object, rdi, &miss);
1594
1595  HandlerFrontendFooter(&miss);
1596
1597  // Return the generated code.
1598  return GetCode(Code::FAST, name);
1599}
1600
1601
1602Handle<Code> CallStubCompiler::CompileArrayCodeCall(
1603    Handle<Object> object,
1604    Handle<JSObject> holder,
1605    Handle<Cell> cell,
1606    Handle<JSFunction> function,
1607    Handle<String> name,
1608    Code::StubType type) {
1609  Label miss;
1610
1611  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1612  if (!cell.is_null()) {
1613    ASSERT(cell->value() == *function);
1614    GenerateLoadFunctionFromCell(cell, function, &miss);
1615  }
1616
1617  Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
1618  site->SetElementsKind(GetInitialFastElementsKind());
1619  Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
1620  const int argc = arguments().immediate();
1621  __ movq(rax, Immediate(argc));
1622  __ Move(rbx, site_feedback_cell);
1623  __ Move(rdi, function);
1624
1625  ArrayConstructorStub stub(isolate());
1626  __ TailCallStub(&stub);
1627
1628  HandlerFrontendFooter(&miss);
1629
1630  // Return the generated code.
1631  return GetCode(type, name);
1632}
1633
1634
1635Handle<Code> CallStubCompiler::CompileArrayPushCall(
1636    Handle<Object> object,
1637    Handle<JSObject> holder,
1638    Handle<Cell> cell,
1639    Handle<JSFunction> function,
1640    Handle<String> name,
1641    Code::StubType type) {
1642  // If object is not an array or is observed or sealed, bail out to regular
1643  // call.
1644  if (!object->IsJSArray() ||
1645      !cell.is_null() ||
1646      Handle<JSArray>::cast(object)->map()->is_observed() ||
1647      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1648    return Handle<Code>::null();
1649  }
1650
1651  Label miss;
1652
1653  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1654
1655  const int argc = arguments().immediate();
1656  StackArgumentsAccessor args(rsp, argc);
1657  if (argc == 0) {
1658    // Noop, return the length.
1659    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1660    __ ret((argc + 1) * kPointerSize);
1661  } else {
1662    Label call_builtin;
1663
1664    if (argc == 1) {  // Otherwise fall through to call builtin.
1665      Label attempt_to_grow_elements, with_write_barrier, check_double;
1666
1667      // Get the elements array of the object.
1668      __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1669
1670      // Check that the elements are in fast mode and writable.
1671      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
1672             factory()->fixed_array_map());
1673      __ j(not_equal, &check_double);
1674
1675      // Get the array's length into rax and calculate new length.
1676      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1677      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1678      __ addl(rax, Immediate(argc));
1679
1680      // Get the elements' length into rcx.
1681      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
1682
1683      // Check if we could survive without allocation.
1684      __ cmpl(rax, rcx);
1685      __ j(greater, &attempt_to_grow_elements);
1686
1687      // Check if value is a smi.
1688      __ movq(rcx, args.GetArgumentOperand(1));
1689      __ JumpIfNotSmi(rcx, &with_write_barrier);
1690
1691      // Save new length.
1692      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1693
1694      // Store the value.
1695      __ movq(FieldOperand(rdi,
1696                           rax,
1697                           times_pointer_size,
1698                           FixedArray::kHeaderSize - argc * kPointerSize),
1699              rcx);
1700
1701      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
1702      __ ret((argc + 1) * kPointerSize);
1703
1704      __ bind(&check_double);
1705
1706      // Check that the elements are in double mode.
1707      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
1708             factory()->fixed_double_array_map());
1709      __ j(not_equal, &call_builtin);
1710
1711      // Get the array's length into rax and calculate new length.
1712      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1713      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1714      __ addl(rax, Immediate(argc));
1715
1716      // Get the elements' length into rcx.
1717      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
1718
1719      // Check if we could survive without allocation.
1720      __ cmpl(rax, rcx);
1721      __ j(greater, &call_builtin);
1722
1723      __ movq(rcx, args.GetArgumentOperand(1));
1724      __ StoreNumberToDoubleElements(
1725          rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);
1726
1727      // Save new length.
1728      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1729      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
1730      __ ret((argc + 1) * kPointerSize);
1731
1732      __ bind(&with_write_barrier);
1733
1734      __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
1735
1736      if (FLAG_smi_only_arrays  && !FLAG_trace_elements_transitions) {
1737        Label fast_object, not_fast_object;
1738        __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
1739        __ jmp(&fast_object);
1740        // In case of fast smi-only, convert to fast object, otherwise bail out.
1741        __ bind(&not_fast_object);
1742        __ CheckFastSmiElements(rbx, &call_builtin);
1743        __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
1744               factory()->heap_number_map());
1745        __ j(equal, &call_builtin);
1746        // rdx: receiver
1747        // rbx: map
1748
1749        Label try_holey_map;
1750        __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1751                                               FAST_ELEMENTS,
1752                                               rbx,
1753                                               rdi,
1754                                               &try_holey_map);
1755
1756        ElementsTransitionGenerator::
1757            GenerateMapChangeElementsTransition(masm(),
1758                                                DONT_TRACK_ALLOCATION_SITE,
1759                                                NULL);
1760        // Restore edi.
1761        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1762        __ jmp(&fast_object);
1763
1764        __ bind(&try_holey_map);
1765        __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1766                                               FAST_HOLEY_ELEMENTS,
1767                                               rbx,
1768                                               rdi,
1769                                               &call_builtin);
1770        ElementsTransitionGenerator::
1771            GenerateMapChangeElementsTransition(masm(),
1772                                                DONT_TRACK_ALLOCATION_SITE,
1773                                                NULL);
1774        __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
1775        __ bind(&fast_object);
1776      } else {
1777        __ CheckFastObjectElements(rbx, &call_builtin);
1778      }
1779
1780      // Save new length.
1781      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1782
1783      // Store the value.
1784      __ lea(rdx, FieldOperand(rdi,
1785                               rax, times_pointer_size,
1786                               FixedArray::kHeaderSize - argc * kPointerSize));
1787      __ movq(Operand(rdx, 0), rcx);
1788
1789      __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1790                     OMIT_SMI_CHECK);
1791
1792      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
1793      __ ret((argc + 1) * kPointerSize);
1794
1795      __ bind(&attempt_to_grow_elements);
1796      if (!FLAG_inline_new) {
1797        __ jmp(&call_builtin);
1798      }
1799
1800      __ movq(rbx, args.GetArgumentOperand(1));
1801      // Growing elements that are SMI-only requires special handling in case
1802      // the new element is non-Smi. For now, delegate to the builtin.
1803      Label no_fast_elements_check;
1804      __ JumpIfSmi(rbx, &no_fast_elements_check);
1805      __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1806      __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
1807      __ bind(&no_fast_elements_check);
1808
1809      ExternalReference new_space_allocation_top =
1810          ExternalReference::new_space_allocation_top_address(isolate());
1811      ExternalReference new_space_allocation_limit =
1812          ExternalReference::new_space_allocation_limit_address(isolate());
1813
1814      const int kAllocationDelta = 4;
1815      // Load top.
1816      __ Load(rcx, new_space_allocation_top);
1817
1818      // Check if it's the end of elements.
1819      __ lea(rdx, FieldOperand(rdi,
1820                               rax, times_pointer_size,
1821                               FixedArray::kHeaderSize - argc * kPointerSize));
1822      __ cmpq(rdx, rcx);
1823      __ j(not_equal, &call_builtin);
1824      __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
1825      Operand limit_operand =
1826          masm()->ExternalOperand(new_space_allocation_limit);
1827      __ cmpq(rcx, limit_operand);
1828      __ j(above, &call_builtin);
1829
1830      // We fit and could grow elements.
1831      __ Store(new_space_allocation_top, rcx);
1832
1833      // Push the argument...
1834      __ movq(Operand(rdx, 0), rbx);
1835      // ... and fill the rest with holes.
1836      __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
1837      for (int i = 1; i < kAllocationDelta; i++) {
1838        __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1839      }
1840
1841      // We know the elements array is in new space so we don't need the
1842      // remembered set, but we just pushed a value onto it so we may have to
1843      // tell the incremental marker to rescan the object that we just grew.  We
1844      // don't need to worry about the holes because they are in old space and
1845      // already marked black.
1846      __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
1847
1848      // Restore receiver to rdx as finish sequence assumes it's here.
1849      __ movq(rdx, args.GetReceiverOperand());
1850
1851      // Increment element's and array's sizes.
1852      __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
1853                        Smi::FromInt(kAllocationDelta));
1854
1855      // Make new length a smi before returning it.
1856      __ Integer32ToSmi(rax, rax);
1857      __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1858
1859      __ ret((argc + 1) * kPointerSize);
1860    }
1861
1862    __ bind(&call_builtin);
1863    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1864                                                   isolate()),
1865                                 argc + 1,
1866                                 1);
1867  }
1868
1869  HandlerFrontendFooter(&miss);
1870
1871  // Return the generated code.
1872  return GetCode(type, name);
1873}
1874
1875
1876Handle<Code> CallStubCompiler::CompileArrayPopCall(
1877    Handle<Object> object,
1878    Handle<JSObject> holder,
1879    Handle<Cell> cell,
1880    Handle<JSFunction> function,
1881    Handle<String> name,
1882    Code::StubType type) {
1883  // If object is not an array or is observed or sealed, bail out to regular
1884  // call.
1885  if (!object->IsJSArray() ||
1886      !cell.is_null() ||
1887      Handle<JSArray>::cast(object)->map()->is_observed() ||
1888      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1889    return Handle<Code>::null();
1890  }
1891
1892  Label miss, return_undefined, call_builtin;
1893
1894  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1895
1896  // Get the elements array of the object.
1897  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1898
1899  // Check that the elements are in fast mode and writable.
1900  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
1901                 Heap::kFixedArrayMapRootIndex);
1902  __ j(not_equal, &call_builtin);
1903
1904  // Get the array's length into rcx and calculate new length.
1905  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
1906  __ subl(rcx, Immediate(1));
1907  __ j(negative, &return_undefined);
1908
1909  // Get the last element.
1910  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
1911  __ movq(rax, FieldOperand(rbx,
1912                            rcx, times_pointer_size,
1913                            FixedArray::kHeaderSize));
1914  // Check if element is already the hole.
1915  __ cmpq(rax, r9);
1916  // If so, call slow-case to also check prototypes for value.
1917  __ j(equal, &call_builtin);
1918
1919  // Set the array's length.
1920  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
1921
1922  // Fill with the hole and return original value.
1923  __ movq(FieldOperand(rbx,
1924                       rcx, times_pointer_size,
1925                       FixedArray::kHeaderSize),
1926          r9);
1927  const int argc = arguments().immediate();
1928  __ ret((argc + 1) * kPointerSize);
1929
1930  __ bind(&return_undefined);
1931  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1932  __ ret((argc + 1) * kPointerSize);
1933
1934  __ bind(&call_builtin);
1935  __ TailCallExternalReference(
1936      ExternalReference(Builtins::c_ArrayPop, isolate()),
1937      argc + 1,
1938      1);
1939
1940  HandlerFrontendFooter(&miss);
1941
1942  // Return the generated code.
1943  return GetCode(type, name);
1944}
1945
1946
1947Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1948    Handle<Object> object,
1949    Handle<JSObject> holder,
1950    Handle<Cell> cell,
1951    Handle<JSFunction> function,
1952    Handle<String> name,
1953    Code::StubType type) {
1954  // If object is not a string, bail out to regular call.
1955  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1956
1957  Label miss;
1958  Label name_miss;
1959  Label index_out_of_range;
1960  Label* index_out_of_range_label = &index_out_of_range;
1961  if (kind_ == Code::CALL_IC &&
1962      (CallICBase::StringStubState::decode(extra_state()) ==
1963       DEFAULT_STRING_STUB)) {
1964    index_out_of_range_label = &miss;
1965  }
1966
1967  HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
1968
1969  Register receiver = rbx;
1970  Register index = rdi;
1971  Register result = rax;
1972  const int argc = arguments().immediate();
1973  StackArgumentsAccessor args(rsp, argc);
1974
1975  __ movq(receiver, args.GetReceiverOperand());
1976  if (argc > 0) {
1977    __ movq(index, args.GetArgumentOperand(1));
1978  } else {
1979    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1980  }
1981
1982  StringCharCodeAtGenerator generator(receiver,
1983                                      index,
1984                                      result,
1985                                      &miss,  // When not a string.
1986                                      &miss,  // When not a number.
1987                                      index_out_of_range_label,
1988                                      STRING_INDEX_IS_NUMBER);
1989  generator.GenerateFast(masm());
1990  __ ret((argc + 1) * kPointerSize);
1991
1992  StubRuntimeCallHelper call_helper;
1993  generator.GenerateSlow(masm(), call_helper);
1994
1995  if (index_out_of_range.is_linked()) {
1996    __ bind(&index_out_of_range);
1997    __ LoadRoot(rax, Heap::kNanValueRootIndex);
1998    __ ret((argc + 1) * kPointerSize);
1999  }
2000
2001  __ bind(&miss);
2002  // Restore function name in rcx.
2003  __ Move(rcx, name);
2004  HandlerFrontendFooter(&name_miss);
2005
2006  // Return the generated code.
2007  return GetCode(type, name);
2008}
2009
2010
2011Handle<Code> CallStubCompiler::CompileStringCharAtCall(
2012    Handle<Object> object,
2013    Handle<JSObject> holder,
2014    Handle<Cell> cell,
2015    Handle<JSFunction> function,
2016    Handle<String> name,
2017    Code::StubType type) {
2018  // If object is not a string, bail out to regular call.
2019  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
2020
2021  const int argc = arguments().immediate();
2022  StackArgumentsAccessor args(rsp, argc);
2023
2024  Label miss;
2025  Label name_miss;
2026  Label index_out_of_range;
2027  Label* index_out_of_range_label = &index_out_of_range;
2028  if (kind_ == Code::CALL_IC &&
2029      (CallICBase::StringStubState::decode(extra_state()) ==
2030       DEFAULT_STRING_STUB)) {
2031    index_out_of_range_label = &miss;
2032  }
2033
2034  HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
2035
2036  Register receiver = rax;
2037  Register index = rdi;
2038  Register scratch = rdx;
2039  Register result = rax;
2040  __ movq(receiver, args.GetReceiverOperand());
2041  if (argc > 0) {
2042    __ movq(index, args.GetArgumentOperand(1));
2043  } else {
2044    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
2045  }
2046
2047  StringCharAtGenerator generator(receiver,
2048                                  index,
2049                                  scratch,
2050                                  result,
2051                                  &miss,  // When not a string.
2052                                  &miss,  // When not a number.
2053                                  index_out_of_range_label,
2054                                  STRING_INDEX_IS_NUMBER);
2055  generator.GenerateFast(masm());
2056  __ ret((argc + 1) * kPointerSize);
2057
2058  StubRuntimeCallHelper call_helper;
2059  generator.GenerateSlow(masm(), call_helper);
2060
2061  if (index_out_of_range.is_linked()) {
2062    __ bind(&index_out_of_range);
2063    __ LoadRoot(rax, Heap::kempty_stringRootIndex);
2064    __ ret((argc + 1) * kPointerSize);
2065  }
2066  __ bind(&miss);
2067  // Restore function name in rcx.
2068  __ Move(rcx, name);
2069  HandlerFrontendFooter(&name_miss);
2070
2071  // Return the generated code.
2072  return GetCode(type, name);
2073}
2074
2075
2076Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
2077    Handle<Object> object,
2078    Handle<JSObject> holder,
2079    Handle<Cell> cell,
2080    Handle<JSFunction> function,
2081    Handle<String> name,
2082    Code::StubType type) {
2083  // If the object is not a JSObject or we got an unexpected number of
2084  // arguments, bail out to the regular call.
2085  const int argc = arguments().immediate();
2086  StackArgumentsAccessor args(rsp, argc);
2087  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2088
2089  Label miss;
2090
2091  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2092  if (!cell.is_null()) {
2093    ASSERT(cell->value() == *function);
2094    GenerateLoadFunctionFromCell(cell, function, &miss);
2095  }
2096
2097  // Load the char code argument.
2098  Register code = rbx;
2099  __ movq(code, args.GetArgumentOperand(1));
2100
2101  // Check the code is a smi.
2102  Label slow;
2103  __ JumpIfNotSmi(code, &slow);
2104
2105  // Convert the smi code to uint16.
2106  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
2107
2108  StringCharFromCodeGenerator generator(code, rax);
2109  generator.GenerateFast(masm());
2110  __ ret(2 * kPointerSize);
2111
2112  StubRuntimeCallHelper call_helper;
2113  generator.GenerateSlow(masm(), call_helper);
2114
2115  __ bind(&slow);
2116  // We do not have to patch the receiver because the function makes no use of
2117  // it.
2118  GenerateJumpFunctionIgnoreReceiver(function);
2119
2120  HandlerFrontendFooter(&miss);
2121
2122  // Return the generated code.
2123  return GetCode(type, name);
2124}
2125
2126
2127Handle<Code> CallStubCompiler::CompileMathFloorCall(
2128    Handle<Object> object,
2129    Handle<JSObject> holder,
2130    Handle<Cell> cell,
2131    Handle<JSFunction> function,
2132    Handle<String> name,
2133    Code::StubType type) {
2134  const int argc = arguments().immediate();
2135  StackArgumentsAccessor args(rsp, argc);
2136
2137  // If the object is not a JSObject or we got an unexpected number of
2138  // arguments, bail out to the regular call.
2139  if (!object->IsJSObject() || argc != 1) {
2140    return Handle<Code>::null();
2141  }
2142
2143  Label miss, slow;
2144
2145  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2146  if (!cell.is_null()) {
2147    ASSERT(cell->value() == *function);
2148    GenerateLoadFunctionFromCell(cell, function, &miss);
2149  }
2150
2151  // Load the (only) argument into rax.
2152  __ movq(rax, args.GetArgumentOperand(1));
2153
2154  // Check if the argument is a smi.
2155  Label smi;
2156  STATIC_ASSERT(kSmiTag == 0);
2157  __ JumpIfSmi(rax, &smi);
2158
2159  // Check if the argument is a heap number and load its value into xmm0.
2160  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
2161  __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
2162
2163  // Check if the argument is strictly positive. Note this also discards NaN.
2164  __ xorpd(xmm1, xmm1);
2165  __ ucomisd(xmm0, xmm1);
2166  __ j(below_equal, &slow);
2167
2168  // Do a truncating conversion.
2169  __ cvttsd2si(rax, xmm0);
2170
2171  // Checks for 0x80000000 which signals a failed conversion.
2172  Label conversion_failure;
2173  __ cmpl(rax, Immediate(0x80000000));
2174  __ j(equal, &conversion_failure);
2175
2176  // Smi tag and return.
2177  __ Integer32ToSmi(rax, rax);
2178  __ bind(&smi);
2179  __ ret(2 * kPointerSize);
2180
2181  // Check if the argument is < 2^kMantissaBits.
2182  Label already_round;
2183  __ bind(&conversion_failure);
2184  int64_t kTwoMantissaBits= V8_INT64_C(0x4330000000000000);
2185  __ movq(rbx, kTwoMantissaBits);
2186  __ movq(xmm1, rbx);
2187  __ ucomisd(xmm0, xmm1);
2188  __ j(above_equal, &already_round);
2189
2190  // Save a copy of the argument.
2191  __ movaps(xmm2, xmm0);
2192
2193  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
2194  __ addsd(xmm0, xmm1);
2195  __ subsd(xmm0, xmm1);
2196
2197  // Compare the argument and the tentative result to get the right mask:
2198  //   if xmm2 < xmm0:
2199  //     xmm2 = 1...1
2200  //   else:
2201  //     xmm2 = 0...0
2202  __ cmpltsd(xmm2, xmm0);
2203
2204  // Subtract 1 if the argument was less than the tentative result.
2205  int64_t kOne = V8_INT64_C(0x3ff0000000000000);
2206  __ movq(rbx, kOne);
2207  __ movq(xmm1, rbx);
2208  __ andpd(xmm1, xmm2);
2209  __ subsd(xmm0, xmm1);
2210
2211  // Return a new heap number.
2212  __ AllocateHeapNumber(rax, rbx, &slow);
2213  __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
2214  __ ret(2 * kPointerSize);
2215
2216  // Return the argument (when it's an already round heap number).
2217  __ bind(&already_round);
2218  __ movq(rax, args.GetArgumentOperand(1));
2219  __ ret(2 * kPointerSize);
2220
2221  __ bind(&slow);
2222  // We do not have to patch the receiver because the function makes no use of
2223  // it.
2224  GenerateJumpFunctionIgnoreReceiver(function);
2225
2226  HandlerFrontendFooter(&miss);
2227
2228  // Return the generated code.
2229  return GetCode(type, name);
2230}
2231
2232
2233Handle<Code> CallStubCompiler::CompileMathAbsCall(
2234    Handle<Object> object,
2235    Handle<JSObject> holder,
2236    Handle<Cell> cell,
2237    Handle<JSFunction> function,
2238    Handle<String> name,
2239    Code::StubType type) {
2240  // If the object is not a JSObject or we got an unexpected number of
2241  // arguments, bail out to the regular call.
2242  const int argc = arguments().immediate();
2243  StackArgumentsAccessor args(rsp, argc);
2244  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2245
2246  Label miss;
2247
2248  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2249  if (!cell.is_null()) {
2250    ASSERT(cell->value() == *function);
2251    GenerateLoadFunctionFromCell(cell, function, &miss);
2252  }
2253
2254  // Load the (only) argument into rax.
2255  __ movq(rax, args.GetArgumentOperand(1));
2256
2257  // Check if the argument is a smi.
2258  Label not_smi;
2259  STATIC_ASSERT(kSmiTag == 0);
2260  __ JumpIfNotSmi(rax, &not_smi);
2261
2262  // Branchless abs implementation, refer to below:
2263  // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
2264  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
2265  // otherwise.
2266  __ movq(rbx, rax);
2267  __ sar(rbx, Immediate(kBitsPerPointer - 1));
2268
2269  // Do bitwise not or do nothing depending on ebx.
2270  __ xor_(rax, rbx);
2271
2272  // Add 1 or do nothing depending on ebx.
2273  __ subq(rax, rbx);
2274
2275  // If the result is still negative, go to the slow case.
2276  // This only happens for the most negative smi.
2277  Label slow;
2278  __ j(negative, &slow);
2279
2280  __ ret(2 * kPointerSize);
2281
2282  // Check if the argument is a heap number and load its value.
2283  __ bind(&not_smi);
2284  __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
2285  __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
2286
2287  // Check the sign of the argument. If the argument is positive,
2288  // just return it.
2289  Label negative_sign;
2290  const int sign_mask_shift =
2291      (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
2292  __ Set(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift);
2293  __ testq(rbx, rdi);
2294  __ j(not_zero, &negative_sign);
2295  __ ret(2 * kPointerSize);
2296
2297  // If the argument is negative, clear the sign, and return a new
2298  // number. We still have the sign mask in rdi.
2299  __ bind(&negative_sign);
2300  __ xor_(rbx, rdi);
2301  __ AllocateHeapNumber(rax, rdx, &slow);
2302  __ MoveDouble(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
2303  __ ret(2 * kPointerSize);
2304
2305  __ bind(&slow);
2306  // We do not have to patch the receiver because the function makes no use of
2307  // it.
2308  GenerateJumpFunctionIgnoreReceiver(function);
2309
2310  HandlerFrontendFooter(&miss);
2311
2312  // Return the generated code.
2313  return GetCode(type, name);
2314}
2315
2316
2317Handle<Code> CallStubCompiler::CompileFastApiCall(
2318    const CallOptimization& optimization,
2319    Handle<Object> object,
2320    Handle<JSObject> holder,
2321    Handle<Cell> cell,
2322    Handle<JSFunction> function,
2323    Handle<String> name) {
2324  ASSERT(optimization.is_simple_api_call());
2325  // Bail out if object is a global object as we don't want to
2326  // repatch it to global receiver.
2327  if (object->IsGlobalObject()) return Handle<Code>::null();
2328  if (!cell.is_null()) return Handle<Code>::null();
2329  if (!object->IsJSObject()) return Handle<Code>::null();
2330  int depth = optimization.GetPrototypeDepthOfExpectedType(
2331      Handle<JSObject>::cast(object), holder);
2332  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2333
2334  Label miss, miss_before_stack_reserved;
2335  GenerateNameCheck(name, &miss_before_stack_reserved);
2336
2337  const int argc = arguments().immediate();
2338  StackArgumentsAccessor args(rsp, argc);
2339  __ movq(rdx, args.GetReceiverOperand());
2340
2341  // Check that the receiver isn't a smi.
2342  __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2343
2344  Counters* counters = isolate()->counters();
2345  __ IncrementCounter(counters->call_const(), 1);
2346  __ IncrementCounter(counters->call_const_fast_api(), 1);
2347
2348  // Allocate space for v8::Arguments implicit values. Must be initialized
2349  // before calling any runtime function.
2350  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2351
2352  // Check that the maps haven't changed and find a Holder as a side effect.
2353  CheckPrototypes(IC::CurrentTypeOf(object, isolate()), rdx, holder,
2354                  rbx, rax, rdi, name, depth, &miss);
2355
2356  // Move the return address on top of the stack.
2357  __ movq(rax,
2358          StackOperandForReturnAddress(kFastApiCallArguments * kPointerSize));
2359  __ movq(StackOperandForReturnAddress(0), rax);
2360
2361  GenerateFastApiCall(masm(), optimization, argc);
2362
2363  __ bind(&miss);
2364  __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2365
2366  HandlerFrontendFooter(&miss_before_stack_reserved);
2367
2368  // Return the generated code.
2369  return GetCode(function);
2370}
2371
2372
2373void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
2374  Label success;
2375  // Check that the object is a boolean.
2376  __ CompareRoot(object, Heap::kTrueValueRootIndex);
2377  __ j(equal, &success);
2378  __ CompareRoot(object, Heap::kFalseValueRootIndex);
2379  __ j(not_equal, miss);
2380  __ bind(&success);
2381}
2382
2383
2384void CallStubCompiler::PatchGlobalProxy(Handle<Object> object) {
2385  if (object->IsGlobalObject()) {
2386    StackArgumentsAccessor args(rsp, arguments());
2387    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2388    __ movq(args.GetReceiverOperand(), rdx);
2389  }
2390}
2391
2392
2393Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object,
2394                                                 Handle<JSObject> holder,
2395                                                 Handle<Name> name,
2396                                                 CheckType check,
2397                                                 Label* miss) {
2398  GenerateNameCheck(name, miss);
2399
2400  Register reg = rdx;
2401
2402  StackArgumentsAccessor args(rsp, arguments());
2403  __ movq(reg, args.GetReceiverOperand());
2404
2405  // Check that the receiver isn't a smi.
2406  if (check != NUMBER_CHECK) {
2407    __ JumpIfSmi(reg, miss);
2408  }
2409
2410  // Make sure that it's okay not to patch the on stack receiver
2411  // unless we're doing a receiver map check.
2412  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2413
2414  Counters* counters = isolate()->counters();
2415  switch (check) {
2416    case RECEIVER_MAP_CHECK:
2417      __ IncrementCounter(counters->call_const(), 1);
2418
2419      // Check that the maps haven't changed.
2420      reg = CheckPrototypes(IC::CurrentTypeOf(object, isolate()), reg, holder,
2421                            rbx, rax, rdi, name, miss);
2422      break;
2423
2424    case STRING_CHECK: {
2425      // Check that the object is a string.
2426      __ CmpObjectType(reg, FIRST_NONSTRING_TYPE, rax);
2427      __ j(above_equal, miss);
2428      // Check that the maps starting from the prototype haven't changed.
2429      GenerateDirectLoadGlobalFunctionPrototype(
2430          masm(), Context::STRING_FUNCTION_INDEX, rax, miss);
2431      break;
2432    }
2433    case SYMBOL_CHECK: {
2434      // Check that the object is a symbol.
2435      __ CmpObjectType(reg, SYMBOL_TYPE, rax);
2436      __ j(not_equal, miss);
2437      // Check that the maps starting from the prototype haven't changed.
2438      GenerateDirectLoadGlobalFunctionPrototype(
2439          masm(), Context::SYMBOL_FUNCTION_INDEX, rax, miss);
2440      break;
2441    }
2442    case NUMBER_CHECK: {
2443      Label fast;
2444      // Check that the object is a smi or a heap number.
2445      __ JumpIfSmi(reg, &fast);
2446      __ CmpObjectType(reg, HEAP_NUMBER_TYPE, rax);
2447      __ j(not_equal, miss);
2448      __ bind(&fast);
2449      // Check that the maps starting from the prototype haven't changed.
2450      GenerateDirectLoadGlobalFunctionPrototype(
2451          masm(), Context::NUMBER_FUNCTION_INDEX, rax, miss);
2452      break;
2453    }
2454    case BOOLEAN_CHECK: {
2455      GenerateBooleanCheck(reg, miss);
2456      // Check that the maps starting from the prototype haven't changed.
2457      GenerateDirectLoadGlobalFunctionPrototype(
2458          masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, miss);
2459      break;
2460    }
2461  }
2462
2463  if (check != RECEIVER_MAP_CHECK) {
2464    Handle<Object> prototype(object->GetPrototype(isolate()), isolate());
2465    reg = CheckPrototypes(
2466        IC::CurrentTypeOf(prototype, isolate()),
2467        rax, holder, rbx, rdx, rdi, name, miss);
2468  }
2469
2470  return reg;
2471}
2472
2473
2474void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
2475                                            Register function,
2476                                            Label* miss) {
2477  // Check that the function really is a function.
2478  GenerateFunctionCheck(function, rbx, miss);
2479
2480  if (!function.is(rdi)) __ movq(rdi, function);
2481  PatchGlobalProxy(object);
2482
2483  // Invoke the function.
2484  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
2485                    NullCallWrapper(), call_kind());
2486}
2487
2488
2489Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2490                                                      Handle<JSObject> holder,
2491                                                      Handle<Name> name) {
2492  Label miss;
2493  GenerateNameCheck(name, &miss);
2494
2495  LookupResult lookup(isolate());
2496  LookupPostInterceptor(holder, name, &lookup);
2497
2498  // Get the receiver from the stack.
2499  StackArgumentsAccessor args(rsp, arguments());
2500  __ movq(rdx, args.GetReceiverOperand());
2501
2502  CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state());
2503  compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax,
2504                   &miss);
2505
2506  // Restore receiver.
2507  __ movq(rdx, args.GetReceiverOperand());
2508
2509  GenerateJumpFunction(object, rax, &miss);
2510
2511  HandlerFrontendFooter(&miss);
2512
2513  // Return the generated code.
2514  return GetCode(Code::FAST, name);
2515}
2516
2517
2518Handle<Code> CallStubCompiler::CompileCallGlobal(
2519    Handle<JSObject> object,
2520    Handle<GlobalObject> holder,
2521    Handle<PropertyCell> cell,
2522    Handle<JSFunction> function,
2523    Handle<Name> name) {
2524  if (HasCustomCallGenerator(function)) {
2525    Handle<Code> code = CompileCustomCall(
2526        object, holder, cell, function, Handle<String>::cast(name),
2527        Code::NORMAL);
2528    // A null handle means bail out to the regular compiler code below.
2529    if (!code.is_null()) return code;
2530  }
2531
2532  Label miss;
2533  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2534  // Potentially loads a closure that matches the shared function info of the
2535  // function, rather than function.
2536  GenerateLoadFunctionFromCell(cell, function, &miss);
2537  Counters* counters = isolate()->counters();
2538  __ IncrementCounter(counters->call_global_inline(), 1);
2539  GenerateJumpFunction(object, rdi, function);
2540  HandlerFrontendFooter(&miss);
2541
2542  // Return the generated code.
2543  return GetCode(Code::NORMAL, name);
2544}
2545
2546
2547Handle<Code> StoreStubCompiler::CompileStoreCallback(
2548    Handle<JSObject> object,
2549    Handle<JSObject> holder,
2550    Handle<Name> name,
2551    Handle<ExecutableAccessorInfo> callback) {
2552  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
2553                  receiver(), holder, name);
2554
2555  __ PopReturnAddressTo(scratch1());
2556  __ push(receiver());
2557  __ Push(callback);  // callback info
2558  __ Push(name);
2559  __ push(value());
2560  __ PushReturnAddressFrom(scratch1());
2561
2562  // Do tail-call to the runtime system.
2563  ExternalReference store_callback_property =
2564      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2565  __ TailCallExternalReference(store_callback_property, 4, 1);
2566
2567  // Return the generated code.
2568  return GetCode(kind(), Code::FAST, name);
2569}
2570
2571
2572Handle<Code> StoreStubCompiler::CompileStoreCallback(
2573    Handle<JSObject> object,
2574    Handle<JSObject> holder,
2575    Handle<Name> name,
2576    const CallOptimization& call_optimization) {
2577  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
2578                  receiver(), holder, name);
2579
2580  Register values[] = { value() };
2581  GenerateFastApiCall(
2582      masm(), call_optimization, receiver(), scratch1(),
2583      scratch2(), this->name(), 1, values);
2584
2585  // Return the generated code.
2586  return GetCode(kind(), Code::FAST, name);
2587}
2588
2589
2590#undef __
2591#define __ ACCESS_MASM(masm)
2592
2593
2594void StoreStubCompiler::GenerateStoreViaSetter(
2595    MacroAssembler* masm,
2596    Handle<JSFunction> setter) {
2597  // ----------- S t a t e -------------
2598  //  -- rax    : value
2599  //  -- rcx    : name
2600  //  -- rdx    : receiver
2601  //  -- rsp[0] : return address
2602  // -----------------------------------
2603  {
2604    FrameScope scope(masm, StackFrame::INTERNAL);
2605
2606    // Save value register, so we can restore it later.
2607    __ push(rax);
2608
2609    if (!setter.is_null()) {
2610      // Call the JavaScript setter with receiver and value on the stack.
2611      __ push(rdx);
2612      __ push(rax);
2613      ParameterCount actual(1);
2614      ParameterCount expected(setter);
2615      __ InvokeFunction(setter, expected, actual,
2616                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2617    } else {
2618      // If we generate a global code snippet for deoptimization only, remember
2619      // the place to continue after deoptimization.
2620      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
2621    }
2622
2623    // We have to return the passed value, not the return value of the setter.
2624    __ pop(rax);
2625
2626    // Restore context register.
2627    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2628  }
2629  __ ret(0);
2630}
2631
2632
2633#undef __
2634#define __ ACCESS_MASM(masm())
2635
2636
2637Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2638    Handle<JSObject> object,
2639    Handle<Name> name) {
2640  __ PopReturnAddressTo(scratch1());
2641  __ push(receiver());
2642  __ push(this->name());
2643  __ push(value());
2644  __ PushReturnAddressFrom(scratch1());
2645
2646  // Do tail-call to the runtime system.
2647  ExternalReference store_ic_property =
2648      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2649  __ TailCallExternalReference(store_ic_property, 3, 1);
2650
2651  // Return the generated code.
2652  return GetCode(kind(), Code::FAST, name);
2653}
2654
2655
2656Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
2657    MapHandleList* receiver_maps,
2658    CodeHandleList* handler_stubs,
2659    MapHandleList* transitioned_maps) {
2660  Label miss;
2661  __ JumpIfSmi(receiver(), &miss, Label::kNear);
2662
2663  __ movq(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
2664  int receiver_count = receiver_maps->length();
2665  for (int i = 0; i < receiver_count; ++i) {
2666    // Check map and tail call if there's a match
2667    __ Cmp(scratch1(), receiver_maps->at(i));
2668    if (transitioned_maps->at(i).is_null()) {
2669      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
2670    } else {
2671      Label next_map;
2672      __ j(not_equal, &next_map, Label::kNear);
2673      __ movq(transition_map(),
2674              transitioned_maps->at(i),
2675              RelocInfo::EMBEDDED_OBJECT);
2676      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
2677      __ bind(&next_map);
2678    }
2679  }
2680
2681  __ bind(&miss);
2682
2683  TailCallBuiltin(masm(), MissBuiltin(kind()));
2684
2685  // Return the generated code.
2686  return GetICCode(
2687      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
2688}
2689
2690
2691Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type,
2692                                                      Handle<JSObject> last,
2693                                                      Handle<Name> name) {
2694  NonexistentHandlerFrontend(type, last, name);
2695
2696  // Return undefined if maps of the full prototype chain are still the
2697  // same and no global property with this name contains a value.
2698  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2699  __ ret(0);
2700
2701  // Return the generated code.
2702  return GetCode(kind(), Code::FAST, name);
2703}
2704
2705
2706Register* LoadStubCompiler::registers() {
2707  // receiver, name, scratch1, scratch2, scratch3, scratch4.
2708  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
2709  return registers;
2710}
2711
2712
2713Register* KeyedLoadStubCompiler::registers() {
2714  // receiver, name, scratch1, scratch2, scratch3, scratch4.
2715  static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
2716  return registers;
2717}
2718
2719
2720Register* StoreStubCompiler::registers() {
2721  // receiver, name, value, scratch1, scratch2, scratch3.
2722  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
2723  return registers;
2724}
2725
2726
2727Register* KeyedStoreStubCompiler::registers() {
2728  // receiver, name, value, scratch1, scratch2, scratch3.
2729  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
2730  return registers;
2731}
2732
2733
2734void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
2735                                              Register name_reg,
2736                                              Label* miss) {
2737  __ Cmp(name_reg, name);
2738  __ j(not_equal, miss);
2739}
2740
2741
2742void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
2743                                               Register name_reg,
2744                                               Label* miss) {
2745  __ Cmp(name_reg, name);
2746  __ j(not_equal, miss);
2747}
2748
2749
2750#undef __
2751#define __ ACCESS_MASM(masm)
2752
2753
2754void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
2755                                             Register receiver,
2756                                             Handle<JSFunction> getter) {
2757  // ----------- S t a t e -------------
2758  //  -- rax    : receiver
2759  //  -- rcx    : name
2760  //  -- rsp[0] : return address
2761  // -----------------------------------
2762  {
2763    FrameScope scope(masm, StackFrame::INTERNAL);
2764
2765    if (!getter.is_null()) {
2766      // Call the JavaScript getter with the receiver on the stack.
2767      __ push(receiver);
2768      ParameterCount actual(0);
2769      ParameterCount expected(getter);
2770      __ InvokeFunction(getter, expected, actual,
2771                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2772    } else {
2773      // If we generate a global code snippet for deoptimization only, remember
2774      // the place to continue after deoptimization.
2775      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
2776    }
2777
2778    // Restore context register.
2779    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2780  }
2781  __ ret(0);
2782}
2783
2784
2785#undef __
2786#define __ ACCESS_MASM(masm())
2787
2788
2789Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2790    Handle<Type> type,
2791    Handle<GlobalObject> global,
2792    Handle<PropertyCell> cell,
2793    Handle<Name> name,
2794    bool is_dont_delete) {
2795  Label miss;
2796  // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
2797  // rax is used as receiver(), which we would otherwise clobber before a
2798  // potential miss.
2799  HandlerFrontendHeader(type, receiver(), global, name, &miss);
2800
2801  // Get the value from the cell.
2802  __ Move(rbx, cell);
2803  __ movq(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));
2804
2805  // Check for deleted property if property can actually be deleted.
2806  if (!is_dont_delete) {
2807    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2808    __ j(equal, &miss);
2809  } else if (FLAG_debug_code) {
2810    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2811    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
2812  }
2813
2814  HandlerFrontendFooter(name, &miss);
2815
2816  Counters* counters = isolate()->counters();
2817  __ IncrementCounter(counters->named_load_global_stub(), 1);
2818  __ movq(rax, rbx);
2819  __ ret(0);
2820
2821  // Return the generated code.
2822  return GetCode(kind(), Code::NORMAL, name);
2823}
2824
2825
2826Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
2827    TypeHandleList* types,
2828    CodeHandleList* handlers,
2829    Handle<Name> name,
2830    Code::StubType type,
2831    IcCheckType check) {
2832  Label miss;
2833
2834  if (check == PROPERTY) {
2835    GenerateNameCheck(name, this->name(), &miss);
2836  }
2837
2838  Label number_case;
2839  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
2840  __ JumpIfSmi(receiver(), smi_target);
2841
2842  Register map_reg = scratch1();
2843  __ movq(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
2844  int receiver_count = types->length();
2845  int number_of_handled_maps = 0;
2846  for (int current = 0; current < receiver_count; ++current) {
2847    Handle<Type> type = types->at(current);
2848    Handle<Map> map = IC::TypeToMap(*type, isolate());
2849    if (!map->is_deprecated()) {
2850      number_of_handled_maps++;
2851      // Check map and tail call if there's a match
2852      __ Cmp(map_reg, map);
2853      if (type->Is(Type::Number())) {
2854        ASSERT(!number_case.is_unused());
2855        __ bind(&number_case);
2856      }
2857      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
2858    }
2859  }
2860  ASSERT(number_of_handled_maps > 0);
2861
2862  __  bind(&miss);
2863  TailCallBuiltin(masm(), MissBuiltin(kind()));
2864
2865  // Return the generated code.
2866  InlineCacheState state =
2867      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
2868  return GetICCode(kind(), type, name, state);
2869}
2870
2871
2872#undef __
2873#define __ ACCESS_MASM(masm)
2874
2875
2876void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
2877    MacroAssembler* masm) {
2878  // ----------- S t a t e -------------
2879  //  -- rax    : key
2880  //  -- rdx    : receiver
2881  //  -- rsp[0] : return address
2882  // -----------------------------------
2883  Label slow, miss;
2884
2885  // This stub is meant to be tail-jumped to, the receiver must already
2886  // have been verified by the caller to not be a smi.
2887
2888  __ JumpIfNotSmi(rax, &miss);
2889  __ SmiToInteger32(rbx, rax);
2890  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2891
2892  // Check whether the elements is a number dictionary.
2893  // rdx: receiver
2894  // rax: key
2895  // rbx: key as untagged int32
2896  // rcx: elements
2897  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
2898  __ ret(0);
2899
2900  __ bind(&slow);
2901  // ----------- S t a t e -------------
2902  //  -- rax    : key
2903  //  -- rdx    : receiver
2904  //  -- rsp[0] : return address
2905  // -----------------------------------
2906  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
2907
2908  __ bind(&miss);
2909  // ----------- S t a t e -------------
2910  //  -- rax    : key
2911  //  -- rdx    : receiver
2912  //  -- rsp[0] : return address
2913  // -----------------------------------
2914  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
2915}
2916
2917
2918#undef __
2919
2920} }  // namespace v8::internal
2921
2922#endif  // V8_TARGET_ARCH_X64
2923