1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_X87
6
7#include "src/code-factory.h"
8#include "src/codegen.h"
9#include "src/deoptimizer.h"
10#include "src/full-codegen/full-codegen.h"
11#include "src/x87/frames-x87.h"
12
13namespace v8 {
14namespace internal {
15
16
17#define __ ACCESS_MASM(masm)
18
19void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
20  // ----------- S t a t e -------------
21  //  -- eax                : number of arguments excluding receiver
22  //  -- edi                : target
23  //  -- edx                : new.target
24  //  -- esp[0]             : return address
25  //  -- esp[4]             : last argument
26  //  -- ...
27  //  -- esp[4 * argc]      : first argument
28  //  -- esp[4 * (argc +1)] : receiver
29  // -----------------------------------
30  __ AssertFunction(edi);
31
32  // Make sure we operate in the context of the called function (for example
33  // ConstructStubs implemented in C++ will be run in the context of the caller
34  // instead of the callee, due to the way that [[Construct]] is defined for
35  // ordinary functions).
36  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
37
38  // Insert extra arguments.
39  const int num_extra_args = 2;
40  __ PopReturnAddressTo(ecx);
41  __ Push(edi);
42  __ Push(edx);
43  __ PushReturnAddressFrom(ecx);
44
45  // JumpToExternalReference expects eax to contain the number of arguments
46  // including the receiver and the extra arguments.
47  __ add(eax, Immediate(num_extra_args + 1));
48
49  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
50}
51
52static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
53                                           Runtime::FunctionId function_id) {
54  // ----------- S t a t e -------------
55  //  -- eax : argument count (preserved for callee)
56  //  -- edx : new target (preserved for callee)
57  //  -- edi : target function (preserved for callee)
58  // -----------------------------------
59  {
60    FrameScope scope(masm, StackFrame::INTERNAL);
61    // Push the number of arguments to the callee.
62    __ SmiTag(eax);
63    __ push(eax);
64    // Push a copy of the target function and the new target.
65    __ push(edi);
66    __ push(edx);
67    // Function is also the parameter to the runtime call.
68    __ push(edi);
69
70    __ CallRuntime(function_id, 1);
71    __ mov(ebx, eax);
72
73    // Restore target function and new target.
74    __ pop(edx);
75    __ pop(edi);
76    __ pop(eax);
77    __ SmiUntag(eax);
78  }
79
80  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
81  __ jmp(ebx);
82}
83
84static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
85  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
86  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
87  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
88  __ jmp(ebx);
89}
90
91void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
92  // Checking whether the queued function is ready for install is optional,
93  // since we come across interrupts and stack checks elsewhere.  However,
94  // not checking may delay installing ready functions, and always checking
95  // would be quite expensive.  A good compromise is to first check against
96  // stack limit as a cue for an interrupt signal.
97  Label ok;
98  ExternalReference stack_limit =
99      ExternalReference::address_of_stack_limit(masm->isolate());
100  __ cmp(esp, Operand::StaticVariable(stack_limit));
101  __ j(above_equal, &ok, Label::kNear);
102
103  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
104
105  __ bind(&ok);
106  GenerateTailCallToSharedCode(masm);
107}
108
109static void Generate_JSConstructStubHelper(MacroAssembler* masm,
110                                           bool is_api_function,
111                                           bool create_implicit_receiver,
112                                           bool check_derived_construct) {
113  // ----------- S t a t e -------------
114  //  -- eax: number of arguments
115  //  -- esi: context
116  //  -- edi: constructor function
117  //  -- ebx: allocation site or undefined
118  //  -- edx: new target
119  // -----------------------------------
120
121  // Enter a construct frame.
122  {
123    FrameScope scope(masm, StackFrame::CONSTRUCT);
124
125    // Preserve the incoming parameters on the stack.
126    __ AssertUndefinedOrAllocationSite(ebx);
127    __ push(esi);
128    __ push(ebx);
129    __ SmiTag(eax);
130    __ push(eax);
131
132    if (create_implicit_receiver) {
133      // Allocate the new receiver object.
134      __ Push(edi);
135      __ Push(edx);
136      FastNewObjectStub stub(masm->isolate());
137      __ CallStub(&stub);
138      __ mov(ebx, eax);
139      __ Pop(edx);
140      __ Pop(edi);
141
142      // ----------- S t a t e -------------
143      //  -- edi: constructor function
144      //  -- ebx: newly allocated object
145      //  -- edx: new target
146      // -----------------------------------
147
148      // Retrieve smi-tagged arguments count from the stack.
149      __ mov(eax, Operand(esp, 0));
150    }
151
152    __ SmiUntag(eax);
153
154    if (create_implicit_receiver) {
155      // Push the allocated receiver to the stack. We need two copies
156      // because we may have to return the original one and the calling
157      // conventions dictate that the called function pops the receiver.
158      __ push(ebx);
159      __ push(ebx);
160    } else {
161      __ PushRoot(Heap::kTheHoleValueRootIndex);
162    }
163
164    // Set up pointer to last argument.
165    __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
166
167    // Copy arguments and receiver to the expression stack.
168    Label loop, entry;
169    __ mov(ecx, eax);
170    __ jmp(&entry);
171    __ bind(&loop);
172    __ push(Operand(ebx, ecx, times_4, 0));
173    __ bind(&entry);
174    __ dec(ecx);
175    __ j(greater_equal, &loop);
176
177    // Call the function.
178    ParameterCount actual(eax);
179    __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
180                      CheckDebugStepCallWrapper());
181
182    // Store offset of return address for deoptimizer.
183    if (create_implicit_receiver && !is_api_function) {
184      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
185    }
186
187    // Restore context from the frame.
188    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
189
190    if (create_implicit_receiver) {
191      // If the result is an object (in the ECMA sense), we should get rid
192      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
193      // on page 74.
194      Label use_receiver, exit;
195
196      // If the result is a smi, it is *not* an object in the ECMA sense.
197      __ JumpIfSmi(eax, &use_receiver);
198
199      // If the type of the result (stored in its map) is less than
200      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
201      __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
202      __ j(above_equal, &exit);
203
204      // Throw away the result of the constructor invocation and use the
205      // on-stack receiver as the result.
206      __ bind(&use_receiver);
207      __ mov(eax, Operand(esp, 0));
208
209      // Restore the arguments count and leave the construct frame. The
210      // arguments count is stored below the receiver.
211      __ bind(&exit);
212      __ mov(ebx, Operand(esp, 1 * kPointerSize));
213    } else {
214      __ mov(ebx, Operand(esp, 0));
215    }
216
217    // Leave construct frame.
218  }
219
220  // ES6 9.2.2. Step 13+
221  // Check that the result is not a Smi, indicating that the constructor result
222  // from a derived class is neither undefined nor an Object.
223  if (check_derived_construct) {
224    Label dont_throw;
225    __ JumpIfNotSmi(eax, &dont_throw);
226    {
227      FrameScope scope(masm, StackFrame::INTERNAL);
228      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
229    }
230    __ bind(&dont_throw);
231  }
232
233  // Remove caller arguments from the stack and return.
234  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
235  __ pop(ecx);
236  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
237  __ push(ecx);
238  if (create_implicit_receiver) {
239    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
240  }
241  __ ret(0);
242}
243
244
245void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
246  Generate_JSConstructStubHelper(masm, false, true, false);
247}
248
249
250void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
251  Generate_JSConstructStubHelper(masm, true, false, false);
252}
253
254
255void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
256  Generate_JSConstructStubHelper(masm, false, false, false);
257}
258
259
260void Builtins::Generate_JSBuiltinsConstructStubForDerived(
261    MacroAssembler* masm) {
262  Generate_JSConstructStubHelper(masm, false, false, true);
263}
264
265
266void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
267  FrameScope scope(masm, StackFrame::INTERNAL);
268  __ push(edi);
269  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
270}
271
272
273enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };
274
275
276// Clobbers ecx, edx, edi; preserves all other registers.
277static void Generate_CheckStackOverflow(MacroAssembler* masm,
278                                        IsTagged eax_is_tagged) {
279  // eax   : the number of items to be pushed to the stack
280  //
281  // Check the stack for overflow. We are not trying to catch
282  // interruptions (e.g. debug break and preemption) here, so the "real stack
283  // limit" is checked.
284  Label okay;
285  ExternalReference real_stack_limit =
286      ExternalReference::address_of_real_stack_limit(masm->isolate());
287  __ mov(edi, Operand::StaticVariable(real_stack_limit));
288  // Make ecx the space we have left. The stack might already be overflowed
289  // here which will cause ecx to become negative.
290  __ mov(ecx, esp);
291  __ sub(ecx, edi);
292  // Make edx the space we need for the array when it is unrolled onto the
293  // stack.
294  __ mov(edx, eax);
295  int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
296  __ shl(edx, kPointerSizeLog2 - smi_tag);
297  // Check if the arguments will overflow the stack.
298  __ cmp(ecx, edx);
299  __ j(greater, &okay);  // Signed comparison.
300
301  // Out of stack space.
302  __ CallRuntime(Runtime::kThrowStackOverflow);
303
304  __ bind(&okay);
305}
306
307
308static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
309                                             bool is_construct) {
310  ProfileEntryHookStub::MaybeCallEntryHook(masm);
311
312  {
313    FrameScope scope(masm, StackFrame::INTERNAL);
314
315    // Setup the context (we need to use the caller context from the isolate).
316    ExternalReference context_address(Isolate::kContextAddress,
317                                      masm->isolate());
318    __ mov(esi, Operand::StaticVariable(context_address));
319
320    // Load the previous frame pointer (ebx) to access C arguments
321    __ mov(ebx, Operand(ebp, 0));
322
323    // Push the function and the receiver onto the stack.
324    __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
325    __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
326
327    // Load the number of arguments and setup pointer to the arguments.
328    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
329    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
330
331    // Check if we have enough stack space to push all arguments.
332    // Expects argument count in eax. Clobbers ecx, edx, edi.
333    Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);
334
335    // Copy arguments to the stack in a loop.
336    Label loop, entry;
337    __ Move(ecx, Immediate(0));
338    __ jmp(&entry, Label::kNear);
339    __ bind(&loop);
340    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
341    __ push(Operand(edx, 0));  // dereference handle
342    __ inc(ecx);
343    __ bind(&entry);
344    __ cmp(ecx, eax);
345    __ j(not_equal, &loop);
346
347    // Load the previous frame pointer (ebx) to access C arguments
348    __ mov(ebx, Operand(ebp, 0));
349
350    // Get the new.target and function from the frame.
351    __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
352    __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
353
354    // Invoke the code.
355    Handle<Code> builtin = is_construct
356                               ? masm->isolate()->builtins()->Construct()
357                               : masm->isolate()->builtins()->Call();
358    __ Call(builtin, RelocInfo::CODE_TARGET);
359
360    // Exit the internal frame. Notice that this also removes the empty.
361    // context and the function left on the stack by the code
362    // invocation.
363  }
364  __ ret(kPointerSize);  // Remove receiver.
365}
366
367
368void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
369  Generate_JSEntryTrampolineHelper(masm, false);
370}
371
372
373void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
374  Generate_JSEntryTrampolineHelper(masm, true);
375}
376
377// static
378void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
379  // ----------- S t a t e -------------
380  //  -- eax    : the value to pass to the generator
381  //  -- ebx    : the JSGeneratorObject to resume
382  //  -- edx    : the resume mode (tagged)
383  //  -- esp[0] : return address
384  // -----------------------------------
385  __ AssertGeneratorObject(ebx);
386
387  // Store input value into generator object.
388  __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
389  __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
390                      kDontSaveFPRegs);
391
392  // Store resume mode into generator object.
393  __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);
394
395  // Load suspended function and context.
396  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
397  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
398
399  // Flood function if we are stepping.
400  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
401  Label stepping_prepared;
402  ExternalReference last_step_action =
403      ExternalReference::debug_last_step_action_address(masm->isolate());
404  STATIC_ASSERT(StepFrame > StepIn);
405  __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
406  __ j(greater_equal, &prepare_step_in_if_stepping);
407
408  // Flood function if we need to continue stepping in the suspended generator.
409  ExternalReference debug_suspended_generator =
410      ExternalReference::debug_suspended_generator_address(masm->isolate());
411  __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
412  __ j(equal, &prepare_step_in_suspended_generator);
413  __ bind(&stepping_prepared);
414
415  // Pop return address.
416  __ PopReturnAddressTo(eax);
417
418  // Push receiver.
419  __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
420
421  // ----------- S t a t e -------------
422  //  -- eax    : return address
423  //  -- ebx    : the JSGeneratorObject to resume
424  //  -- edx    : the resume mode (tagged)
425  //  -- edi    : generator function
426  //  -- esi    : generator context
427  //  -- esp[0] : generator receiver
428  // -----------------------------------
429
430  // Push holes for arguments to generator function. Since the parser forced
431  // context allocation for any variables in generators, the actual argument
432  // values have already been copied into the context and these dummy values
433  // will never be used.
434  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
435  __ mov(ecx,
436         FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
437  {
438    Label done_loop, loop;
439    __ bind(&loop);
440    __ sub(ecx, Immediate(Smi::FromInt(1)));
441    __ j(carry, &done_loop, Label::kNear);
442    __ PushRoot(Heap::kTheHoleValueRootIndex);
443    __ jmp(&loop);
444    __ bind(&done_loop);
445  }
446
447  // Dispatch on the kind of generator object.
448  Label old_generator;
449  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
450  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
451  __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
452  __ j(not_equal, &old_generator);
453
454  // New-style (ignition/turbofan) generator object
455  {
456    __ PushReturnAddressFrom(eax);
457    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
458    __ mov(eax,
459           FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
460    // We abuse new.target both to indicate that this is a resume call and to
461    // pass in the generator object.  In ordinary calls, new.target is always
462    // undefined because generator functions are non-constructable.
463    __ mov(edx, ebx);
464    __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
465  }
466
467  // Old-style (full-codegen) generator object
468  __ bind(&old_generator);
469  {
470    // Enter a new JavaScript frame, and initialize its slots as they were when
471    // the generator was suspended.
472    FrameScope scope(masm, StackFrame::MANUAL);
473    __ PushReturnAddressFrom(eax);  // Return address.
474    __ Push(ebp);                   // Caller's frame pointer.
475    __ Move(ebp, esp);
476    __ Push(esi);  // Callee's context.
477    __ Push(edi);  // Callee's JS Function.
478
479    // Restore the operand stack.
480    __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
481    {
482      Label done_loop, loop;
483      __ Move(ecx, Smi::FromInt(0));
484      __ bind(&loop);
485      __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
486      __ j(equal, &done_loop, Label::kNear);
487      __ Push(FieldOperand(eax, ecx, times_half_pointer_size,
488                           FixedArray::kHeaderSize));
489      __ add(ecx, Immediate(Smi::FromInt(1)));
490      __ jmp(&loop);
491      __ bind(&done_loop);
492    }
493
494    // Reset operand stack so we don't leak.
495    __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset),
496           Immediate(masm->isolate()->factory()->empty_fixed_array()));
497
498    // Resume the generator function at the continuation.
499    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
500    __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
501    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
502    __ SmiUntag(ecx);
503    __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize));
504    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
505           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
506    __ mov(eax, ebx);  // Continuation expects generator object in eax.
507    __ jmp(edx);
508  }
509
510  __ bind(&prepare_step_in_if_stepping);
511  {
512    FrameScope scope(masm, StackFrame::INTERNAL);
513    __ Push(ebx);
514    __ Push(edx);
515    __ Push(edi);
516    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
517    __ Pop(edx);
518    __ Pop(ebx);
519    __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
520  }
521  __ jmp(&stepping_prepared);
522
523  __ bind(&prepare_step_in_suspended_generator);
524  {
525    FrameScope scope(masm, StackFrame::INTERNAL);
526    __ Push(ebx);
527    __ Push(edx);
528    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
529    __ Pop(edx);
530    __ Pop(ebx);
531    __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
532  }
533  __ jmp(&stepping_prepared);
534}
535
536static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
537                                  Register scratch2) {
538  Register args_count = scratch1;
539  Register return_pc = scratch2;
540
541  // Get the arguments + reciever count.
542  __ mov(args_count,
543         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
544  __ mov(args_count,
545         FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
546
547  // Leave the frame (also dropping the register file).
548  __ leave();
549
550  // Drop receiver + arguments.
551  __ pop(return_pc);
552  __ add(esp, args_count);
553  __ push(return_pc);
554}
555
556// Generate code for entering a JS function with the interpreter.
557// On entry to the function the receiver and arguments have been pushed on the
558// stack left to right.  The actual argument count matches the formal parameter
559// count expected by the function.
560//
561// The live registers are:
562//   o edi: the JS function object being called
563//   o edx: the new target
564//   o esi: our context
565//   o ebp: the caller's frame pointer
566//   o esp: stack pointer (pointing to return address)
567//
568// The function builds an interpreter frame.  See InterpreterFrameConstants in
569// frames.h for its layout.
570void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
571  ProfileEntryHookStub::MaybeCallEntryHook(masm);
572
573  // Open a frame scope to indicate that there is a frame on the stack.  The
574  // MANUAL indicates that the scope shouldn't actually generate code to set up
575  // the frame (that is done below).
576  FrameScope frame_scope(masm, StackFrame::MANUAL);
577  __ push(ebp);  // Caller's frame pointer.
578  __ mov(ebp, esp);
579  __ push(esi);  // Callee's context.
580  __ push(edi);  // Callee's JS function.
581  __ push(edx);  // Callee's new target.
582
583  // Get the bytecode array from the function object (or from the DebugInfo if
584  // it is present) and load it into kInterpreterBytecodeArrayRegister.
585  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
586  Label load_debug_bytecode_array, bytecode_array_loaded;
587  __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
588         Immediate(DebugInfo::uninitialized()));
589  __ j(not_equal, &load_debug_bytecode_array);
590  __ mov(kInterpreterBytecodeArrayRegister,
591         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
592  __ bind(&bytecode_array_loaded);
593
594  // Check function data field is actually a BytecodeArray object.
595  Label bytecode_array_not_present;
596  __ CompareRoot(kInterpreterBytecodeArrayRegister,
597                 Heap::kUndefinedValueRootIndex);
598  __ j(equal, &bytecode_array_not_present);
599  if (FLAG_debug_code) {
600    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
601    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
602                     eax);
603    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
604  }
605
606  // Push bytecode array.
607  __ push(kInterpreterBytecodeArrayRegister);
608  // Push Smi tagged initial bytecode array offset.
609  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
610
611  // Allocate the local and temporary register file on the stack.
612  {
613    // Load frame size from the BytecodeArray object.
614    __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
615                             BytecodeArray::kFrameSizeOffset));
616
617    // Do a stack check to ensure we don't go over the limit.
618    Label ok;
619    __ mov(ecx, esp);
620    __ sub(ecx, ebx);
621    ExternalReference stack_limit =
622        ExternalReference::address_of_real_stack_limit(masm->isolate());
623    __ cmp(ecx, Operand::StaticVariable(stack_limit));
624    __ j(above_equal, &ok);
625    __ CallRuntime(Runtime::kThrowStackOverflow);
626    __ bind(&ok);
627
628    // If ok, push undefined as the initial value for all register file entries.
629    Label loop_header;
630    Label loop_check;
631    __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
632    __ jmp(&loop_check);
633    __ bind(&loop_header);
634    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
635    __ push(eax);
636    // Continue loop if not done.
637    __ bind(&loop_check);
638    __ sub(ebx, Immediate(kPointerSize));
639    __ j(greater_equal, &loop_header);
640  }
641
642  // Load accumulator, bytecode offset and dispatch table into registers.
643  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
644  __ mov(kInterpreterBytecodeOffsetRegister,
645         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
646  __ mov(kInterpreterDispatchTableRegister,
647         Immediate(ExternalReference::interpreter_dispatch_table_address(
648             masm->isolate())));
649
650  // Dispatch to the first bytecode handler for the function.
651  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
652                          kInterpreterBytecodeOffsetRegister, times_1, 0));
653  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
654                      times_pointer_size, 0));
655  __ call(ebx);
656  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
657
658  // The return value is in eax.
659  LeaveInterpreterFrame(masm, ebx, ecx);
660  __ ret(0);
661
662  // Load debug copy of the bytecode array.
663  __ bind(&load_debug_bytecode_array);
664  Register debug_info = kInterpreterBytecodeArrayRegister;
665  __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
666  __ mov(kInterpreterBytecodeArrayRegister,
667         FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
668  __ jmp(&bytecode_array_loaded);
669
670  // If the bytecode array is no longer present, then the underlying function
671  // has been switched to a different kind of code and we heal the closure by
672  // switching the code entry field over to the new code object as well.
673  __ bind(&bytecode_array_not_present);
674  __ pop(edx);  // Callee's new target.
675  __ pop(edi);  // Callee's JS function.
676  __ pop(esi);  // Callee's context.
677  __ leave();   // Leave the frame so we can tail call.
678  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
679  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
680  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
681  __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
682  __ RecordWriteCodeEntryField(edi, ecx, ebx);
683  __ jmp(ecx);
684}
685
686void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
687  // Save the function and context for call to CompileBaseline.
688  __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
689  __ mov(kContextRegister,
690         Operand(ebp, StandardFrameConstants::kContextOffset));
691
692  // Leave the frame before recompiling for baseline so that we don't count as
693  // an activation on the stack.
694  LeaveInterpreterFrame(masm, ebx, ecx);
695
696  {
697    FrameScope frame_scope(masm, StackFrame::INTERNAL);
698    // Push return value.
699    __ push(eax);
700
701    // Push function as argument and compile for baseline.
702    __ push(edi);
703    __ CallRuntime(Runtime::kCompileBaseline);
704
705    // Restore return value.
706    __ pop(eax);
707  }
708  __ ret(0);
709}
710
711static void Generate_InterpreterPushArgs(MacroAssembler* masm,
712                                         Register array_limit) {
713  // ----------- S t a t e -------------
714  //  -- ebx : Pointer to the last argument in the args array.
715  //  -- array_limit : Pointer to one before the first argument in the
716  //                   args array.
717  // -----------------------------------
718  Label loop_header, loop_check;
719  __ jmp(&loop_check);
720  __ bind(&loop_header);
721  __ Push(Operand(ebx, 0));
722  __ sub(ebx, Immediate(kPointerSize));
723  __ bind(&loop_check);
724  __ cmp(ebx, array_limit);
725  __ j(greater, &loop_header, Label::kNear);
726}
727
728// static
729void Builtins::Generate_InterpreterPushArgsAndCallImpl(
730    MacroAssembler* masm, TailCallMode tail_call_mode) {
731  // ----------- S t a t e -------------
732  //  -- eax : the number of arguments (not including the receiver)
733  //  -- ebx : the address of the first argument to be pushed. Subsequent
734  //           arguments should be consecutive above this, in the same order as
735  //           they are to be pushed onto the stack.
736  //  -- edi : the target to call (can be any Object).
737  // -----------------------------------
738
739  // Pop return address to allow tail-call after pushing arguments.
740  __ Pop(edx);
741
742  // Find the address of the last argument.
743  __ mov(ecx, eax);
744  __ add(ecx, Immediate(1));  // Add one for receiver.
745  __ shl(ecx, kPointerSizeLog2);
746  __ neg(ecx);
747  __ add(ecx, ebx);
748
749  Generate_InterpreterPushArgs(masm, ecx);
750
751  // Call the target.
752  __ Push(edx);  // Re-push return address.
753  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
754                                            tail_call_mode),
755          RelocInfo::CODE_TARGET);
756}
757
758
759// static
760void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
761  // ----------- S t a t e -------------
762  //  -- eax : the number of arguments (not including the receiver)
763  //  -- edx : the new target
764  //  -- edi : the constructor
765  //  -- ebx : the address of the first argument to be pushed. Subsequent
766  //           arguments should be consecutive above this, in the same order as
767  //           they are to be pushed onto the stack.
768  // -----------------------------------
769
770  // Pop return address to allow tail-call after pushing arguments.
771  __ Pop(ecx);
772
773  // Push edi in the slot meant for receiver. We need an extra register
774  // so store edi temporarily on stack.
775  __ Push(edi);
776
777  // Find the address of the last argument.
778  __ mov(edi, eax);
779  __ neg(edi);
780  __ shl(edi, kPointerSizeLog2);
781  __ add(edi, ebx);
782
783  Generate_InterpreterPushArgs(masm, edi);
784
785  // Restore the constructor from slot on stack. It was pushed at the slot
786  // meant for receiver.
787  __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
788
789  // Re-push return address.
790  __ Push(ecx);
791
792  // Call the constructor with unmodified eax, edi, ebi values.
793  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
794}
795
796void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
797  // Set the return address to the correct point in the interpreter entry
798  // trampoline.
799  Smi* interpreter_entry_return_pc_offset(
800      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
801  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
802  __ LoadHeapObject(ebx,
803                    masm->isolate()->builtins()->InterpreterEntryTrampoline());
804  __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
805                        Code::kHeaderSize - kHeapObjectTag));
806  __ push(ebx);
807
808  // Initialize the dispatch table register.
809  __ mov(kInterpreterDispatchTableRegister,
810         Immediate(ExternalReference::interpreter_dispatch_table_address(
811             masm->isolate())));
812
813  // Get the bytecode array pointer from the frame.
814  __ mov(kInterpreterBytecodeArrayRegister,
815         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
816
817  if (FLAG_debug_code) {
818    // Check function data field is actually a BytecodeArray object.
819    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
820    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
821                     ebx);
822    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
823  }
824
825  // Get the target bytecode offset from the frame.
826  __ mov(kInterpreterBytecodeOffsetRegister,
827         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
828  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
829
830  // Dispatch to the target bytecode.
831  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
832                          kInterpreterBytecodeOffsetRegister, times_1, 0));
833  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
834                      times_pointer_size, 0));
835  __ jmp(ebx);
836}
837
838void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
839  // ----------- S t a t e -------------
840  //  -- eax : argument count (preserved for callee)
841  //  -- edx : new target (preserved for callee)
842  //  -- edi : target function (preserved for callee)
843  // -----------------------------------
844  // First lookup code, maybe we don't need to compile!
845  Label gotta_call_runtime, gotta_call_runtime_no_stack;
846  Label maybe_call_runtime;
847  Label try_shared;
848  Label loop_top, loop_bottom;
849
850  Register closure = edi;
851  Register new_target = edx;
852  Register argument_count = eax;
853
854  __ push(argument_count);
855  __ push(new_target);
856  __ push(closure);
857
858  Register map = argument_count;
859  Register index = ebx;
860  __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
861  __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
862  __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
863  __ cmp(index, Immediate(Smi::FromInt(2)));
864  __ j(less, &gotta_call_runtime);
865
866  // Find literals.
867  // edx : native context
868  // ebx : length / index
869  // eax : optimized code map
870  // stack[0] : new target
871  // stack[4] : closure
872  Register native_context = edx;
873  __ mov(native_context, NativeContextOperand());
874
875  __ bind(&loop_top);
876  Register temp = edi;
877
878  // Does the native context match?
879  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
880                            SharedFunctionInfo::kOffsetToPreviousContext));
881  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
882  __ cmp(temp, native_context);
883  __ j(not_equal, &loop_bottom);
884  // OSR id set to none?
885  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
886                            SharedFunctionInfo::kOffsetToPreviousOsrAstId));
887  const int bailout_id = BailoutId::None().ToInt();
888  __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
889  __ j(not_equal, &loop_bottom);
890
891  // Literals available?
892  Label got_literals, maybe_cleared_weakcell;
893  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
894                            SharedFunctionInfo::kOffsetToPreviousLiterals));
895
896  // temp contains either a WeakCell pointing to the literals array or the
897  // literals array directly.
898  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
899  __ JumpIfSmi(FieldOperand(temp, WeakCell::kValueOffset),
900               &maybe_cleared_weakcell);
901  // The WeakCell value is a pointer, therefore it's a valid literals array.
902  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
903  __ jmp(&got_literals);
904
905  // We have a smi. If it's 0, then we are looking at a cleared WeakCell
906  // around the literals array, and we should visit the runtime. If it's > 0,
907  // then temp already contains the literals array.
908  __ bind(&maybe_cleared_weakcell);
909  __ cmp(FieldOperand(temp, WeakCell::kValueOffset), Immediate(0));
910  __ j(equal, &gotta_call_runtime);
911
912  // Save the literals in the closure.
913  __ bind(&got_literals);
914  __ mov(ecx, Operand(esp, 0));
915  __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
916  __ push(index);
917  __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
918                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
919  __ pop(index);
920
921  // Code available?
922  Register entry = ecx;
923  __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
924                             SharedFunctionInfo::kOffsetToPreviousCachedCode));
925  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
926  __ JumpIfSmi(entry, &maybe_call_runtime);
927
928  // Found literals and code. Get them into the closure and return.
929  __ pop(closure);
930  // Store code entry in the closure.
931  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
932
933  Label install_optimized_code_and_tailcall;
934  __ bind(&install_optimized_code_and_tailcall);
935  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
936  __ RecordWriteCodeEntryField(closure, entry, eax);
937
938  // Link the closure into the optimized function list.
939  // ecx : code entry
940  // edx : native context
941  // edi : closure
942  __ mov(ebx,
943         ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
944  __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
945  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
946                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
947  const int function_list_offset =
948      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
949  __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
950         closure);
951  // Save closure before the write barrier.
952  __ mov(ebx, closure);
953  __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
954                            kDontSaveFPRegs);
955  __ mov(closure, ebx);
956  __ pop(new_target);
957  __ pop(argument_count);
958  __ jmp(entry);
959
960  __ bind(&loop_bottom);
961  __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
962  __ cmp(index, Immediate(Smi::FromInt(1)));
963  __ j(greater, &loop_top);
964
965  // We found neither literals nor code.
966  __ jmp(&gotta_call_runtime);
967
968  __ bind(&maybe_call_runtime);
969  __ pop(closure);
970
971  // Last possibility. Check the context free optimized code map entry.
972  __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
973                                      SharedFunctionInfo::kSharedCodeIndex));
974  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
975  __ JumpIfSmi(entry, &try_shared);
976
977  // Store code entry in the closure.
978  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
979  __ jmp(&install_optimized_code_and_tailcall);
980
981  __ bind(&try_shared);
982  __ pop(new_target);
983  __ pop(argument_count);
984  // Is the full code valid?
985  __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
986  __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
987  __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
988  __ and_(ebx, Code::KindField::kMask);
989  __ shr(ebx, Code::KindField::kShift);
990  __ cmp(ebx, Immediate(Code::BUILTIN));
991  __ j(equal, &gotta_call_runtime_no_stack);
992  // Yes, install the full code.
993  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
994  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
995  __ RecordWriteCodeEntryField(closure, entry, ebx);
996  __ jmp(entry);
997
998  __ bind(&gotta_call_runtime);
999  __ pop(closure);
1000  __ pop(new_target);
1001  __ pop(argument_count);
1002  __ bind(&gotta_call_runtime_no_stack);
1003
1004  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1005}
1006
1007void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1008  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1009}
1010
1011void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1012  GenerateTailCallToReturnedCode(masm,
1013                                 Runtime::kCompileOptimized_NotConcurrent);
1014}
1015
1016
1017void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1018  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1019}
1020
1021
1022static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1023  // For now, we are relying on the fact that make_code_young doesn't do any
1024  // garbage collection which allows us to save/restore the registers without
1025  // worrying about which of them contain pointers. We also don't build an
1026  // internal frame to make the code faster, since we shouldn't have to do stack
1027  // crawls in MakeCodeYoung. This seems a bit fragile.
1028
1029  // Re-execute the code that was patched back to the young age when
1030  // the stub returns.
1031  __ sub(Operand(esp, 0), Immediate(5));
1032  __ pushad();
1033  __ mov(eax, Operand(esp, 8 * kPointerSize));
1034  {
1035    FrameScope scope(masm, StackFrame::MANUAL);
1036    __ PrepareCallCFunction(2, ebx);
1037    __ mov(Operand(esp, 1 * kPointerSize),
1038           Immediate(ExternalReference::isolate_address(masm->isolate())));
1039    __ mov(Operand(esp, 0), eax);
1040    __ CallCFunction(
1041        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1042  }
1043  __ popad();
1044  __ ret(0);
1045}
1046
1047#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
1048void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
1049    MacroAssembler* masm) {                                  \
1050  GenerateMakeCodeYoungAgainCommon(masm);                    \
1051}                                                            \
1052void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
1053    MacroAssembler* masm) {                                  \
1054  GenerateMakeCodeYoungAgainCommon(masm);                    \
1055}
1056CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1057#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1058
1059
1060void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1061  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1062  // that make_code_young doesn't do any garbage collection which allows us to
1063  // save/restore the registers without worrying about which of them contain
1064  // pointers.
1065  __ pushad();
1066  __ mov(eax, Operand(esp, 8 * kPointerSize));
1067  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
1068  {  // NOLINT
1069    FrameScope scope(masm, StackFrame::MANUAL);
1070    __ PrepareCallCFunction(2, ebx);
1071    __ mov(Operand(esp, 1 * kPointerSize),
1072           Immediate(ExternalReference::isolate_address(masm->isolate())));
1073    __ mov(Operand(esp, 0), eax);
1074    __ CallCFunction(
1075        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1076        2);
1077  }
1078  __ popad();
1079
1080  // Perform prologue operations usually performed by the young code stub.
1081  __ pop(eax);   // Pop return address into scratch register.
1082  __ push(ebp);  // Caller's frame pointer.
1083  __ mov(ebp, esp);
1084  __ push(esi);  // Callee's context.
1085  __ push(edi);  // Callee's JS Function.
1086  __ push(eax);  // Push return address after frame prologue.
1087
1088  // Jump to point after the code-age stub.
1089  __ ret(0);
1090}
1091
1092
1093void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1094  GenerateMakeCodeYoungAgainCommon(masm);
1095}
1096
1097
1098void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1099  Generate_MarkCodeAsExecutedOnce(masm);
1100}
1101
1102
1103static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1104                                             SaveFPRegsMode save_doubles) {
1105  // Enter an internal frame.
1106  {
1107    FrameScope scope(masm, StackFrame::INTERNAL);
1108
1109    // Preserve registers across notification, this is important for compiled
1110    // stubs that tail call the runtime on deopts passing their parameters in
1111    // registers.
1112    __ pushad();
1113    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1114    __ popad();
1115    // Tear down internal frame.
1116  }
1117
1118  __ pop(MemOperand(esp, 0));  // Ignore state offset
1119  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
1120}
1121
1122
1123void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1124  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1125}
1126
1127
1128void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1129  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1130}
1131
1132
1133static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1134                                             Deoptimizer::BailoutType type) {
1135  {
1136    FrameScope scope(masm, StackFrame::INTERNAL);
1137
1138    // Pass deoptimization type to the runtime system.
1139    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
1140    __ CallRuntime(Runtime::kNotifyDeoptimized);
1141
1142    // Tear down internal frame.
1143  }
1144
1145  // Get the full codegen state from the stack and untag it.
1146  __ mov(ecx, Operand(esp, 1 * kPointerSize));
1147  __ SmiUntag(ecx);
1148
1149  // Switch on the state.
1150  Label not_no_registers, not_tos_eax;
1151  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
1152  __ j(not_equal, &not_no_registers, Label::kNear);
1153  __ ret(1 * kPointerSize);  // Remove state.
1154
1155  __ bind(&not_no_registers);
1156  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1157  __ mov(eax, Operand(esp, 2 * kPointerSize));
1158  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
1159  __ j(not_equal, &not_tos_eax, Label::kNear);
1160  __ ret(2 * kPointerSize);  // Remove state, eax.
1161
1162  __ bind(&not_tos_eax);
1163  __ Abort(kNoCasesLeft);
1164}
1165
1166
1167void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1168  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1169}
1170
1171
1172void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1173  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1174}
1175
1176
1177void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1178  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1179}
1180
1181
1182// static
1183void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
1184                                               int field_index) {
1185  // ----------- S t a t e -------------
1186  //  -- eax    : number of arguments
1187  //  -- edi    : function
1188  //  -- esi    : context
1189  //  -- esp[0] : return address
1190  //  -- esp[4] : receiver
1191  // -----------------------------------
1192
1193  // 1. Load receiver into eax and check that it's actually a JSDate object.
1194  Label receiver_not_date;
1195  {
1196    __ mov(eax, Operand(esp, kPointerSize));
1197    __ JumpIfSmi(eax, &receiver_not_date);
1198    __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
1199    __ j(not_equal, &receiver_not_date);
1200  }
1201
1202  // 2. Load the specified date field, falling back to the runtime as necessary.
1203  if (field_index == JSDate::kDateValue) {
1204    __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
1205  } else {
1206    if (field_index < JSDate::kFirstUncachedField) {
1207      Label stamp_mismatch;
1208      __ mov(edx, Operand::StaticVariable(
1209                      ExternalReference::date_cache_stamp(masm->isolate())));
1210      __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
1211      __ j(not_equal, &stamp_mismatch, Label::kNear);
1212      __ mov(eax, FieldOperand(
1213                      eax, JSDate::kValueOffset + field_index * kPointerSize));
1214      __ ret(1 * kPointerSize);
1215      __ bind(&stamp_mismatch);
1216    }
1217    FrameScope scope(masm, StackFrame::INTERNAL);
1218    __ PrepareCallCFunction(2, ebx);
1219    __ mov(Operand(esp, 0), eax);
1220    __ mov(Operand(esp, 1 * kPointerSize),
1221           Immediate(Smi::FromInt(field_index)));
1222    __ CallCFunction(
1223        ExternalReference::get_date_field_function(masm->isolate()), 2);
1224  }
1225  __ ret(1 * kPointerSize);
1226
1227  // 3. Raise a TypeError if the receiver is not a date.
1228  __ bind(&receiver_not_date);
1229  {
1230    FrameScope scope(masm, StackFrame::MANUAL);
1231    __ Push(ebp);
1232    __ Move(ebp, esp);
1233    __ Push(esi);
1234    __ Push(edi);
1235    __ Push(Immediate(0));
1236    __ CallRuntime(Runtime::kThrowNotDateError);
1237  }
1238}
1239
1240// static
1241void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1242  // ----------- S t a t e -------------
1243  //  -- eax     : argc
1244  //  -- esp[0]  : return address
1245  //  -- esp[4]  : argArray
1246  //  -- esp[8]  : thisArg
1247  //  -- esp[12] : receiver
1248  // -----------------------------------
1249
1250  // 1. Load receiver into edi, argArray into eax (if present), remove all
1251  // arguments from the stack (including the receiver), and push thisArg (if
1252  // present) instead.
1253  {
1254    Label no_arg_array, no_this_arg;
1255    __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1256    __ mov(ebx, edx);
1257    __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1258    __ test(eax, eax);
1259    __ j(zero, &no_this_arg, Label::kNear);
1260    {
1261      __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
1262      __ cmp(eax, Immediate(1));
1263      __ j(equal, &no_arg_array, Label::kNear);
1264      __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1265      __ bind(&no_arg_array);
1266    }
1267    __ bind(&no_this_arg);
1268    __ PopReturnAddressTo(ecx);
1269    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1270    __ Push(edx);
1271    __ PushReturnAddressFrom(ecx);
1272    __ Move(eax, ebx);
1273  }
1274
1275  // ----------- S t a t e -------------
1276  //  -- eax    : argArray
1277  //  -- edi    : receiver
1278  //  -- esp[0] : return address
1279  //  -- esp[4] : thisArg
1280  // -----------------------------------
1281
1282  // 2. Make sure the receiver is actually callable.
1283  Label receiver_not_callable;
1284  __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
1285  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1286  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1287            Immediate(1 << Map::kIsCallable));
1288  __ j(zero, &receiver_not_callable, Label::kNear);
1289
1290  // 3. Tail call with no arguments if argArray is null or undefined.
1291  Label no_arguments;
1292  __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
1293  __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
1294                Label::kNear);
1295
1296  // 4a. Apply the receiver to the given argArray (passing undefined for
1297  // new.target).
1298  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1299  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1300
1301  // 4b. The argArray is either null or undefined, so we tail call without any
1302  // arguments to the receiver.
1303  __ bind(&no_arguments);
1304  {
1305    __ Set(eax, 0);
1306    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1307  }
1308
1309  // 4c. The receiver is not callable, throw an appropriate TypeError.
1310  __ bind(&receiver_not_callable);
1311  {
1312    __ mov(Operand(esp, kPointerSize), edi);
1313    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1314  }
1315}
1316
1317
1318// static
1319void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1320  // Stack Layout:
1321  // esp[0]           : Return address
1322  // esp[8]           : Argument n
1323  // esp[16]          : Argument n-1
1324  //  ...
1325  // esp[8 * n]       : Argument 1
1326  // esp[8 * (n + 1)] : Receiver (callable to call)
1327  //
1328  // eax contains the number of arguments, n, not counting the receiver.
1329  //
1330  // 1. Make sure we have at least one argument.
1331  {
1332    Label done;
1333    __ test(eax, eax);
1334    __ j(not_zero, &done, Label::kNear);
1335    __ PopReturnAddressTo(ebx);
1336    __ PushRoot(Heap::kUndefinedValueRootIndex);
1337    __ PushReturnAddressFrom(ebx);
1338    __ inc(eax);
1339    __ bind(&done);
1340  }
1341
1342  // 2. Get the callable to call (passed as receiver) from the stack.
1343  __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
1344
1345  // 3. Shift arguments and return address one slot down on the stack
1346  //    (overwriting the original receiver).  Adjust argument count to make
1347  //    the original first argument the new receiver.
1348  {
1349    Label loop;
1350    __ mov(ecx, eax);
1351    __ bind(&loop);
1352    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1353    __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
1354    __ dec(ecx);
1355    __ j(not_sign, &loop);  // While non-negative (to copy return address).
1356    __ pop(ebx);            // Discard copy of return address.
1357    __ dec(eax);  // One fewer argument (first argument is new receiver).
1358  }
1359
1360  // 4. Call the callable.
1361  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1362}
1363
1364
1365void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1366  // ----------- S t a t e -------------
1367  //  -- eax     : argc
1368  //  -- esp[0]  : return address
1369  //  -- esp[4]  : argumentsList
1370  //  -- esp[8]  : thisArgument
1371  //  -- esp[12] : target
1372  //  -- esp[16] : receiver
1373  // -----------------------------------
1374
1375  // 1. Load target into edi (if present), argumentsList into eax (if present),
1376  // remove all arguments from the stack (including the receiver), and push
1377  // thisArgument (if present) instead.
1378  {
1379    Label done;
1380    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1381    __ mov(edx, edi);
1382    __ mov(ebx, edi);
1383    __ cmp(eax, Immediate(1));
1384    __ j(below, &done, Label::kNear);
1385    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1386    __ j(equal, &done, Label::kNear);
1387    __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1388    __ cmp(eax, Immediate(3));
1389    __ j(below, &done, Label::kNear);
1390    __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1391    __ bind(&done);
1392    __ PopReturnAddressTo(ecx);
1393    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1394    __ Push(edx);
1395    __ PushReturnAddressFrom(ecx);
1396    __ Move(eax, ebx);
1397  }
1398
1399  // ----------- S t a t e -------------
1400  //  -- eax    : argumentsList
1401  //  -- edi    : target
1402  //  -- esp[0] : return address
1403  //  -- esp[4] : thisArgument
1404  // -----------------------------------
1405
1406  // 2. Make sure the target is actually callable.
1407  Label target_not_callable;
1408  __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
1409  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1410  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1411            Immediate(1 << Map::kIsCallable));
1412  __ j(zero, &target_not_callable, Label::kNear);
1413
1414  // 3a. Apply the target to the given argumentsList (passing undefined for
1415  // new.target).
1416  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
1417  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1418
1419  // 3b. The target is not callable, throw an appropriate TypeError.
1420  __ bind(&target_not_callable);
1421  {
1422    __ mov(Operand(esp, kPointerSize), edi);
1423    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1424  }
1425}
1426
1427void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1428  // ----------- S t a t e -------------
1429  //  -- eax     : argc
1430  //  -- esp[0]  : return address
1431  //  -- esp[4]  : new.target (optional)
1432  //  -- esp[8]  : argumentsList
1433  //  -- esp[12] : target
1434  //  -- esp[16] : receiver
1435  // -----------------------------------
1436
1437  // 1. Load target into edi (if present), argumentsList into eax (if present),
1438  // new.target into edx (if present, otherwise use target), remove all
1439  // arguments from the stack (including the receiver), and push thisArgument
1440  // (if present) instead.
1441  {
1442    Label done;
1443    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
1444    __ mov(edx, edi);
1445    __ mov(ebx, edi);
1446    __ cmp(eax, Immediate(1));
1447    __ j(below, &done, Label::kNear);
1448    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1449    __ mov(edx, edi);
1450    __ j(equal, &done, Label::kNear);
1451    __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1452    __ cmp(eax, Immediate(3));
1453    __ j(below, &done, Label::kNear);
1454    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
1455    __ bind(&done);
1456    __ PopReturnAddressTo(ecx);
1457    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1458    __ PushRoot(Heap::kUndefinedValueRootIndex);
1459    __ PushReturnAddressFrom(ecx);
1460    __ Move(eax, ebx);
1461  }
1462
1463  // ----------- S t a t e -------------
1464  //  -- eax    : argumentsList
1465  //  -- edx    : new.target
1466  //  -- edi    : target
1467  //  -- esp[0] : return address
1468  //  -- esp[4] : receiver (undefined)
1469  // -----------------------------------
1470
1471  // 2. Make sure the target is actually a constructor.
1472  Label target_not_constructor;
1473  __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
1474  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
1475  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1476            Immediate(1 << Map::kIsConstructor));
1477  __ j(zero, &target_not_constructor, Label::kNear);
1478
1479  // 3. Make sure the target is actually a constructor.
1480  Label new_target_not_constructor;
1481  __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
1482  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
1483  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1484            Immediate(1 << Map::kIsConstructor));
1485  __ j(zero, &new_target_not_constructor, Label::kNear);
1486
1487  // 4a. Construct the target with the given new.target and argumentsList.
1488  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1489
1490  // 4b. The target is not a constructor, throw an appropriate TypeError.
1491  __ bind(&target_not_constructor);
1492  {
1493    __ mov(Operand(esp, kPointerSize), edi);
1494    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1495  }
1496
1497  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
1498  __ bind(&new_target_not_constructor);
1499  {
1500    __ mov(Operand(esp, kPointerSize), edx);
1501    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
1502  }
1503}
1504
1505
1506void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1507  // ----------- S t a t e -------------
1508  //  -- eax : argc
1509  //  -- esp[0] : return address
1510  //  -- esp[4] : last argument
1511  // -----------------------------------
1512  Label generic_array_code;
1513
1514  // Get the InternalArray function.
1515  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1516
1517  if (FLAG_debug_code) {
1518    // Initial map for the builtin InternalArray function should be a map.
1519    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1520    // Will both indicate a NULL and a Smi.
1521    __ test(ebx, Immediate(kSmiTagMask));
1522    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
1523    __ CmpObjectType(ebx, MAP_TYPE, ecx);
1524    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
1525  }
1526
1527  // Run the native code for the InternalArray function called as a normal
1528  // function.
1529  // tail call a stub
1530  InternalArrayConstructorStub stub(masm->isolate());
1531  __ TailCallStub(&stub);
1532}
1533
1534
1535void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1536  // ----------- S t a t e -------------
1537  //  -- eax : argc
1538  //  -- esp[0] : return address
1539  //  -- esp[4] : last argument
1540  // -----------------------------------
1541  Label generic_array_code;
1542
1543  // Get the Array function.
1544  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1545  __ mov(edx, edi);
1546
1547  if (FLAG_debug_code) {
1548    // Initial map for the builtin Array function should be a map.
1549    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1550    // Will both indicate a NULL and a Smi.
1551    __ test(ebx, Immediate(kSmiTagMask));
1552    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
1553    __ CmpObjectType(ebx, MAP_TYPE, ecx);
1554    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
1555  }
1556
1557  // Run the native code for the Array function called as a normal function.
1558  // tail call a stub
1559  __ mov(ebx, masm->isolate()->factory()->undefined_value());
1560  ArrayConstructorStub stub(masm->isolate());
1561  __ TailCallStub(&stub);
1562}
1563
1564
1565// static
1566void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
1567  // ----------- S t a t e -------------
1568  //  -- eax                 : number of arguments
1569  //  -- edi                 : function
1570  //  -- esi                 : context
1571  //  -- esp[0]              : return address
1572  //  -- esp[(argc - n) * 8] : arg[n] (zero-based)
1573  //  -- esp[(argc + 1) * 8] : receiver
1574  // -----------------------------------
1575  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
1576  Heap::RootListIndex const root_index =
1577      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
1578                                     : Heap::kMinusInfinityValueRootIndex;
1579  const int reg_sel = (kind == MathMaxMinKind::kMin) ? 1 : 0;
1580
1581  // Load the accumulator with the default return value (either -Infinity or
1582  // +Infinity), with the tagged value in edx and the double value in stx_0.
1583  __ LoadRoot(edx, root_index);
1584  __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1585  __ Move(ecx, eax);
1586
1587  Label done_loop, loop;
1588  __ bind(&loop);
1589  {
1590    // Check if all parameters done.
1591    __ test(ecx, ecx);
1592    __ j(zero, &done_loop);
1593
1594    // Load the next parameter tagged value into ebx.
1595    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
1596
1597    // Load the double value of the parameter into stx_1, maybe converting the
1598    // parameter to a number first using the ToNumber builtin if necessary.
1599    Label convert, convert_smi, convert_number, done_convert;
1600    __ bind(&convert);
1601    __ JumpIfSmi(ebx, &convert_smi);
1602    __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1603                  Heap::kHeapNumberMapRootIndex, &convert_number);
1604    {
1605      // Parameter is not a Number, use the ToNumber builtin to convert it.
1606      FrameScope scope(masm, StackFrame::MANUAL);
1607      __ Push(ebp);
1608      __ Move(ebp, esp);
1609      __ Push(esi);
1610      __ Push(edi);
1611      __ SmiTag(eax);
1612      __ SmiTag(ecx);
1613      __ Push(eax);
1614      __ Push(ecx);
1615      __ Push(edx);
1616      __ mov(eax, ebx);
1617      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1618      __ mov(ebx, eax);
1619      __ Pop(edx);
1620      __ Pop(ecx);
1621      __ Pop(eax);
1622      __ Pop(edi);
1623      __ Pop(esi);
1624      {
1625        // Restore the double accumulator value (stX_0).
1626        Label restore_smi, done_restore;
1627        __ JumpIfSmi(edx, &restore_smi, Label::kNear);
1628        __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1629        __ jmp(&done_restore, Label::kNear);
1630        __ bind(&restore_smi);
1631        __ SmiUntag(edx);
1632        __ push(edx);
1633        __ fild_s(Operand(esp, 0));
1634        __ pop(edx);
1635        __ SmiTag(edx);
1636        __ bind(&done_restore);
1637      }
1638      __ SmiUntag(ecx);
1639      __ SmiUntag(eax);
1640      __ leave();
1641    }
1642    __ jmp(&convert);
1643    __ bind(&convert_number);
1644    // Load another value into stx_1
1645    __ fld_d(FieldOperand(ebx, HeapNumber::kValueOffset));
1646    __ fxch();
1647    __ jmp(&done_convert, Label::kNear);
1648    __ bind(&convert_smi);
1649    __ SmiUntag(ebx);
1650    __ push(ebx);
1651    __ fild_s(Operand(esp, 0));
1652    __ pop(ebx);
1653    __ fxch();
1654    __ SmiTag(ebx);
1655    __ bind(&done_convert);
1656
1657    // Perform the actual comparison with the accumulator value on the left hand
1658    // side (stx_0) and the next parameter value on the right hand side (stx_1).
1659    Label compare_equal, compare_nan, compare_swap, done_compare;
1660
1661    // Duplicates the 2 float data for FCmp
1662    __ fld(1);
1663    __ fld(1);
1664    __ FCmp();
1665    __ j(parity_even, &compare_nan, Label::kNear);
1666    __ j(cc, &done_compare, Label::kNear);
1667    __ j(equal, &compare_equal, Label::kNear);
1668
1669    // Result is on the right hand side(stx_0).
1670    __ bind(&compare_swap);
1671    __ fxch();
1672    __ mov(edx, ebx);
1673    __ jmp(&done_compare, Label::kNear);
1674
1675    // At least one side is NaN, which means that the result will be NaN too.
1676    __ bind(&compare_nan);
1677    // Set the result on the right hand side (stx_0) to nan
1678    __ fstp(0);
1679    __ LoadRoot(edx, Heap::kNanValueRootIndex);
1680    __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
1681    __ jmp(&done_compare, Label::kNear);
1682
1683    // Left and right hand side are equal, check for -0 vs. +0.
1684    __ bind(&compare_equal);
1685    // Check the sign of the value in reg_sel
1686    __ fld(reg_sel);
1687    __ FXamSign();
1688    __ j(not_zero, &compare_swap);
1689
1690    __ bind(&done_compare);
1691    // The right result is on the right hand side(stx_0)
1692    // and can remove the useless stx_1 now.
1693    __ fxch();
1694    __ fstp(0);
1695    __ dec(ecx);
1696    __ jmp(&loop);
1697  }
1698
1699  __ bind(&done_loop);
1700  __ PopReturnAddressTo(ecx);
1701  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1702  __ PushReturnAddressFrom(ecx);
1703  __ mov(eax, edx);
1704  __ Ret();
1705}
1706
1707// static
1708void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
1709  // ----------- S t a t e -------------
1710  //  -- eax                 : number of arguments
1711  //  -- edi                 : constructor function
1712  //  -- esp[0]              : return address
1713  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
1714  //  -- esp[(argc + 1) * 4] : receiver
1715  // -----------------------------------
1716
1717  // 1. Load the first argument into eax and get rid of the rest (including the
1718  // receiver).
1719  Label no_arguments;
1720  {
1721    __ test(eax, eax);
1722    __ j(zero, &no_arguments, Label::kNear);
1723    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1724    __ PopReturnAddressTo(ecx);
1725    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1726    __ PushReturnAddressFrom(ecx);
1727    __ mov(eax, ebx);
1728  }
1729
1730  // 2a. Convert the first argument to a number.
1731  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1732
1733  // 2b. No arguments, return +0 (already in eax).
1734  __ bind(&no_arguments);
1735  __ ret(1 * kPointerSize);
1736}
1737
1738
1739// static
1740void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
1741  // ----------- S t a t e -------------
1742  //  -- eax                 : number of arguments
1743  //  -- edi                 : constructor function
1744  //  -- edx                 : new target
1745  //  -- esp[0]              : return address
1746  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
1747  //  -- esp[(argc + 1) * 4] : receiver
1748  // -----------------------------------
1749
1750  // 1. Make sure we operate in the context of the called function.
1751  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1752
1753  // 2. Load the first argument into ebx and get rid of the rest (including the
1754  // receiver).
1755  {
1756    Label no_arguments, done;
1757    __ test(eax, eax);
1758    __ j(zero, &no_arguments, Label::kNear);
1759    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1760    __ jmp(&done, Label::kNear);
1761    __ bind(&no_arguments);
1762    __ Move(ebx, Smi::FromInt(0));
1763    __ bind(&done);
1764    __ PopReturnAddressTo(ecx);
1765    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1766    __ PushReturnAddressFrom(ecx);
1767  }
1768
1769  // 3. Make sure ebx is a number.
1770  {
1771    Label done_convert;
1772    __ JumpIfSmi(ebx, &done_convert);
1773    __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
1774                   Heap::kHeapNumberMapRootIndex);
1775    __ j(equal, &done_convert);
1776    {
1777      FrameScope scope(masm, StackFrame::INTERNAL);
1778      __ Push(edi);
1779      __ Push(edx);
1780      __ Move(eax, ebx);
1781      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
1782      __ Move(ebx, eax);
1783      __ Pop(edx);
1784      __ Pop(edi);
1785    }
1786    __ bind(&done_convert);
1787  }
1788
1789  // 4. Check if new target and constructor differ.
1790  Label new_object;
1791  __ cmp(edx, edi);
1792  __ j(not_equal, &new_object);
1793
1794  // 5. Allocate a JSValue wrapper for the number.
1795  __ AllocateJSValue(eax, edi, ebx, ecx, &new_object);
1796  __ Ret();
1797
1798  // 6. Fallback to the runtime to create new object.
1799  __ bind(&new_object);
1800  {
1801    FrameScope scope(masm, StackFrame::INTERNAL);
1802    __ Push(ebx);  // the first argument
1803    FastNewObjectStub stub(masm->isolate());
1804    __ CallStub(&stub);
1805    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
1806  }
1807  __ Ret();
1808}
1809
1810
1811// static
1812void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
1813  // ----------- S t a t e -------------
1814  //  -- eax                 : number of arguments
1815  //  -- edi                 : constructor function
1816  //  -- esp[0]              : return address
1817  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
1818  //  -- esp[(argc + 1) * 4] : receiver
1819  // -----------------------------------
1820
1821  // 1. Load the first argument into eax and get rid of the rest (including the
1822  // receiver).
1823  Label no_arguments;
1824  {
1825    __ test(eax, eax);
1826    __ j(zero, &no_arguments, Label::kNear);
1827    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1828    __ PopReturnAddressTo(ecx);
1829    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1830    __ PushReturnAddressFrom(ecx);
1831    __ mov(eax, ebx);
1832  }
1833
1834  // 2a. At least one argument, return eax if it's a string, otherwise
1835  // dispatch to appropriate conversion.
1836  Label to_string, symbol_descriptive_string;
1837  {
1838    __ JumpIfSmi(eax, &to_string, Label::kNear);
1839    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
1840    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
1841    __ j(above, &to_string, Label::kNear);
1842    __ j(equal, &symbol_descriptive_string, Label::kNear);
1843    __ Ret();
1844  }
1845
1846  // 2b. No arguments, return the empty string (and pop the receiver).
1847  __ bind(&no_arguments);
1848  {
1849    __ LoadRoot(eax, Heap::kempty_stringRootIndex);
1850    __ ret(1 * kPointerSize);
1851  }
1852
1853  // 3a. Convert eax to a string.
1854  __ bind(&to_string);
1855  {
1856    ToStringStub stub(masm->isolate());
1857    __ TailCallStub(&stub);
1858  }
1859
1860  // 3b. Convert symbol in eax to a string.
1861  __ bind(&symbol_descriptive_string);
1862  {
1863    __ PopReturnAddressTo(ecx);
1864    __ Push(eax);
1865    __ PushReturnAddressFrom(ecx);
1866    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
1867  }
1868}
1869
1870
1871// static
1872void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
1873  // ----------- S t a t e -------------
1874  //  -- eax                 : number of arguments
1875  //  -- edi                 : constructor function
1876  //  -- edx                 : new target
1877  //  -- esp[0]              : return address
1878  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
1879  //  -- esp[(argc + 1) * 4] : receiver
1880  // -----------------------------------
1881
1882  // 1. Make sure we operate in the context of the called function.
1883  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1884
1885  // 2. Load the first argument into ebx and get rid of the rest (including the
1886  // receiver).
1887  {
1888    Label no_arguments, done;
1889    __ test(eax, eax);
1890    __ j(zero, &no_arguments, Label::kNear);
1891    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1892    __ jmp(&done, Label::kNear);
1893    __ bind(&no_arguments);
1894    __ LoadRoot(ebx, Heap::kempty_stringRootIndex);
1895    __ bind(&done);
1896    __ PopReturnAddressTo(ecx);
1897    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1898    __ PushReturnAddressFrom(ecx);
1899  }
1900
1901  // 3. Make sure ebx is a string.
1902  {
1903    Label convert, done_convert;
1904    __ JumpIfSmi(ebx, &convert, Label::kNear);
1905    __ CmpObjectType(ebx, FIRST_NONSTRING_TYPE, ecx);
1906    __ j(below, &done_convert);
1907    __ bind(&convert);
1908    {
1909      FrameScope scope(masm, StackFrame::INTERNAL);
1910      ToStringStub stub(masm->isolate());
1911      __ Push(edi);
1912      __ Push(edx);
1913      __ Move(eax, ebx);
1914      __ CallStub(&stub);
1915      __ Move(ebx, eax);
1916      __ Pop(edx);
1917      __ Pop(edi);
1918    }
1919    __ bind(&done_convert);
1920  }
1921
1922  // 4. Check if new target and constructor differ.
1923  Label new_object;
1924  __ cmp(edx, edi);
1925  __ j(not_equal, &new_object);
1926
1927  // 5. Allocate a JSValue wrapper for the string.
1928  __ AllocateJSValue(eax, edi, ebx, ecx, &new_object);
1929  __ Ret();
1930
1931  // 6. Fallback to the runtime to create new object.
1932  __ bind(&new_object);
1933  {
1934    FrameScope scope(masm, StackFrame::INTERNAL);
1935    __ Push(ebx);  // the first argument
1936    FastNewObjectStub stub(masm->isolate());
1937    __ CallStub(&stub);
1938    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
1939  }
1940  __ Ret();
1941}
1942
1943
1944static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1945                                       Label* stack_overflow) {
1946  // ----------- S t a t e -------------
1947  //  -- eax : actual number of arguments
1948  //  -- ebx : expected number of arguments
1949  //  -- edx : new target (passed through to callee)
1950  // -----------------------------------
1951  // Check the stack for overflow. We are not trying to catch
1952  // interruptions (e.g. debug break and preemption) here, so the "real stack
1953  // limit" is checked.
1954  ExternalReference real_stack_limit =
1955      ExternalReference::address_of_real_stack_limit(masm->isolate());
1956  __ mov(edi, Operand::StaticVariable(real_stack_limit));
1957  // Make ecx the space we have left. The stack might already be overflowed
1958  // here which will cause ecx to become negative.
1959  __ mov(ecx, esp);
1960  __ sub(ecx, edi);
1961  // Make edi the space we need for the array when it is unrolled onto the
1962  // stack.
1963  __ mov(edi, ebx);
1964  __ shl(edi, kPointerSizeLog2);
1965  // Check if the arguments will overflow the stack.
1966  __ cmp(ecx, edi);
1967  __ j(less_equal, stack_overflow);  // Signed comparison.
1968}
1969
1970
1971static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1972  __ push(ebp);
1973  __ mov(ebp, esp);
1974
1975  // Store the arguments adaptor context sentinel.
1976  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1977
1978  // Push the function on the stack.
1979  __ push(edi);
1980
1981  // Preserve the number of arguments on the stack. Must preserve eax,
1982  // ebx and ecx because these registers are used when copying the
1983  // arguments and the receiver.
1984  STATIC_ASSERT(kSmiTagSize == 1);
1985  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1986  __ push(edi);
1987}
1988
1989
1990static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1991  // Retrieve the number of arguments from the stack.
1992  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1993
1994  // Leave the frame.
1995  __ leave();
1996
1997  // Remove caller arguments from the stack.
1998  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1999  __ pop(ecx);
2000  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
2001  __ push(ecx);
2002}
2003
2004
2005// static
2006void Builtins::Generate_Apply(MacroAssembler* masm) {
2007  // ----------- S t a t e -------------
2008  //  -- eax    : argumentsList
2009  //  -- edi    : target
2010  //  -- edx    : new.target (checked to be constructor or undefined)
2011  //  -- esp[0] : return address.
2012  //  -- esp[4] : thisArgument
2013  // -----------------------------------
2014
2015  // Create the list of arguments from the array-like argumentsList.
2016  {
2017    Label create_arguments, create_array, create_runtime, done_create;
2018    __ JumpIfSmi(eax, &create_runtime);
2019
2020    // Load the map of argumentsList into ecx.
2021    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
2022
2023    // Load native context into ebx.
2024    __ mov(ebx, NativeContextOperand());
2025
2026    // Check if argumentsList is an (unmodified) arguments object.
2027    __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2028    __ j(equal, &create_arguments);
2029    __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
2030    __ j(equal, &create_arguments);
2031
2032    // Check if argumentsList is a fast JSArray.
2033    __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
2034    __ j(equal, &create_array);
2035
2036    // Ask the runtime to create the list (actually a FixedArray).
2037    __ bind(&create_runtime);
2038    {
2039      FrameScope scope(masm, StackFrame::INTERNAL);
2040      __ Push(edi);
2041      __ Push(edx);
2042      __ Push(eax);
2043      __ CallRuntime(Runtime::kCreateListFromArrayLike);
2044      __ Pop(edx);
2045      __ Pop(edi);
2046      __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
2047      __ SmiUntag(ebx);
2048    }
2049    __ jmp(&done_create);
2050
2051    // Try to create the list from an arguments object.
2052    __ bind(&create_arguments);
2053    __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
2054    __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
2055    __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2056    __ j(not_equal, &create_runtime);
2057    __ SmiUntag(ebx);
2058    __ mov(eax, ecx);
2059    __ jmp(&done_create);
2060
2061    // Try to create the list from a JSArray object.
2062    __ bind(&create_array);
2063    __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
2064    __ DecodeField<Map::ElementsKindBits>(ecx);
2065    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2066    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2067    STATIC_ASSERT(FAST_ELEMENTS == 2);
2068    __ cmp(ecx, Immediate(FAST_ELEMENTS));
2069    __ j(above, &create_runtime);
2070    __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
2071    __ j(equal, &create_runtime);
2072    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
2073    __ SmiUntag(ebx);
2074    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));
2075
2076    __ bind(&done_create);
2077  }
2078
2079  // Check for stack overflow.
2080  {
2081    // Check the stack for overflow. We are not trying to catch interruptions
2082    // (i.e. debug break and preemption) here, so check the "real stack limit".
2083    Label done;
2084    ExternalReference real_stack_limit =
2085        ExternalReference::address_of_real_stack_limit(masm->isolate());
2086    __ mov(ecx, Operand::StaticVariable(real_stack_limit));
2087    // Make ecx the space we have left. The stack might already be overflowed
2088    // here which will cause ecx to become negative.
2089    __ neg(ecx);
2090    __ add(ecx, esp);
2091    __ sar(ecx, kPointerSizeLog2);
2092    // Check if the arguments will overflow the stack.
2093    __ cmp(ecx, ebx);
2094    __ j(greater, &done, Label::kNear);  // Signed comparison.
2095    __ TailCallRuntime(Runtime::kThrowStackOverflow);
2096    __ bind(&done);
2097  }
2098
2099  // ----------- S t a t e -------------
2100  //  -- edi    : target
2101  //  -- eax    : args (a FixedArray built from argumentsList)
2102  //  -- ebx    : len (number of elements to push from args)
2103  //  -- edx    : new.target (checked to be constructor or undefined)
2104  //  -- esp[0] : return address.
2105  //  -- esp[4] : thisArgument
2106  // -----------------------------------
2107
2108  // Push arguments onto the stack (thisArgument is already on the stack).
2109  {
2110    __ push(edx);
2111    __ fld_s(MemOperand(esp, 0));
2112    __ lea(esp, Operand(esp, kFloatSize));
2113
2114    __ PopReturnAddressTo(edx);
2115    __ Move(ecx, Immediate(0));
2116    Label done, loop;
2117    __ bind(&loop);
2118    __ cmp(ecx, ebx);
2119    __ j(equal, &done, Label::kNear);
2120    __ Push(
2121        FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
2122    __ inc(ecx);
2123    __ jmp(&loop);
2124    __ bind(&done);
2125    __ PushReturnAddressFrom(edx);
2126
2127    __ lea(esp, Operand(esp, -kFloatSize));
2128    __ fstp_s(MemOperand(esp, 0));
2129    __ pop(edx);
2130
2131    __ Move(eax, ebx);
2132  }
2133
2134  // Dispatch to Call or Construct depending on whether new.target is undefined.
2135  {
2136    __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
2137    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2138    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2139  }
2140}
2141
2142namespace {
2143
2144// Drops top JavaScript frame and an arguments adaptor frame below it (if
2145// present) preserving all the arguments prepared for current call.
2146// Does nothing if debugger is currently active.
2147// ES6 14.6.3. PrepareForTailCall
2148//
2149// Stack structure for the function g() tail calling f():
2150//
2151// ------- Caller frame: -------
2152// |  ...
2153// |  g()'s arg M
2154// |  ...
2155// |  g()'s arg 1
2156// |  g()'s receiver arg
2157// |  g()'s caller pc
2158// ------- g()'s frame: -------
2159// |  g()'s caller fp      <- fp
2160// |  g()'s context
2161// |  function pointer: g
2162// |  -------------------------
2163// |  ...
2164// |  ...
2165// |  f()'s arg N
2166// |  ...
2167// |  f()'s arg 1
2168// |  f()'s receiver arg
2169// |  f()'s caller pc      <- sp
2170// ----------------------
2171//
2172void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2173                        Register scratch1, Register scratch2,
2174                        Register scratch3) {
2175  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2176  Comment cmnt(masm, "[ PrepareForTailCall");
2177
2178  // Prepare for tail call only if ES2015 tail call elimination is enabled.
2179  Label done;
2180  ExternalReference is_tail_call_elimination_enabled =
2181      ExternalReference::is_tail_call_elimination_enabled_address(
2182          masm->isolate());
2183  __ movzx_b(scratch1,
2184             Operand::StaticVariable(is_tail_call_elimination_enabled));
2185  __ cmp(scratch1, Immediate(0));
2186  __ j(equal, &done, Label::kNear);
2187
2188  // Drop possible interpreter handler/stub frame.
2189  {
2190    Label no_interpreter_frame;
2191    __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
2192           Immediate(Smi::FromInt(StackFrame::STUB)));
2193    __ j(not_equal, &no_interpreter_frame, Label::kNear);
2194    __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2195    __ bind(&no_interpreter_frame);
2196  }
2197
2198  // Check if next frame is an arguments adaptor frame.
2199  Register caller_args_count_reg = scratch1;
2200  Label no_arguments_adaptor, formal_parameter_count_loaded;
2201  __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2202  __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
2203         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2204  __ j(not_equal, &no_arguments_adaptor, Label::kNear);
2205
2206  // Drop current frame and load arguments count from arguments adaptor frame.
2207  __ mov(ebp, scratch2);
2208  __ mov(caller_args_count_reg,
2209         Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2210  __ SmiUntag(caller_args_count_reg);
2211  __ jmp(&formal_parameter_count_loaded, Label::kNear);
2212
2213  __ bind(&no_arguments_adaptor);
2214  // Load caller's formal parameter count
2215  __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2216  __ mov(scratch1,
2217         FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2218  __ mov(
2219      caller_args_count_reg,
2220      FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
2221  __ SmiUntag(caller_args_count_reg);
2222
2223  __ bind(&formal_parameter_count_loaded);
2224
2225  ParameterCount callee_args_count(args_reg);
2226  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2227                        scratch3, ReturnAddressState::kOnStack, 0);
2228  __ bind(&done);
2229}
2230}  // namespace
2231
2232// static
2233void Builtins::Generate_CallFunction(MacroAssembler* masm,
2234                                     ConvertReceiverMode mode,
2235                                     TailCallMode tail_call_mode) {
2236  // ----------- S t a t e -------------
2237  //  -- eax : the number of arguments (not including the receiver)
2238  //  -- edi : the function to call (checked to be a JSFunction)
2239  // -----------------------------------
2240  __ AssertFunction(edi);
2241
2242  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2243  // Check that the function is not a "classConstructor".
2244  Label class_constructor;
2245  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2246  __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
2247            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2248  __ j(not_zero, &class_constructor);
2249
2250  // Enter the context of the function; ToObject has to run in the function
2251  // context, and we also need to take the global proxy from the function
2252  // context in case of conversion.
2253  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2254                SharedFunctionInfo::kStrictModeByteOffset);
2255  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2256  // We need to convert the receiver for non-native sloppy mode functions.
2257  Label done_convert;
2258  __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
2259            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2260                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2261  __ j(not_zero, &done_convert);
2262  {
2263    // ----------- S t a t e -------------
2264    //  -- eax : the number of arguments (not including the receiver)
2265    //  -- edx : the shared function info.
2266    //  -- edi : the function to call (checked to be a JSFunction)
2267    //  -- esi : the function context.
2268    // -----------------------------------
2269
2270    if (mode == ConvertReceiverMode::kNullOrUndefined) {
2271      // Patch receiver to global proxy.
2272      __ LoadGlobalProxy(ecx);
2273    } else {
2274      Label convert_to_object, convert_receiver;
2275      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
2276      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
2277      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2278      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
2279      __ j(above_equal, &done_convert);
2280      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2281        Label convert_global_proxy;
2282        __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
2283                      &convert_global_proxy, Label::kNear);
2284        __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
2285                         Label::kNear);
2286        __ bind(&convert_global_proxy);
2287        {
2288          // Patch receiver to global proxy.
2289          __ LoadGlobalProxy(ecx);
2290        }
2291        __ jmp(&convert_receiver);
2292      }
2293      __ bind(&convert_to_object);
2294      {
2295        // Convert receiver using ToObject.
2296        // TODO(bmeurer): Inline the allocation here to avoid building the frame
2297        // in the fast case? (fall back to AllocateInNewSpace?)
2298        FrameScope scope(masm, StackFrame::INTERNAL);
2299        __ SmiTag(eax);
2300        __ Push(eax);
2301        __ Push(edi);
2302        __ mov(eax, ecx);
2303        ToObjectStub stub(masm->isolate());
2304        __ CallStub(&stub);
2305        __ mov(ecx, eax);
2306        __ Pop(edi);
2307        __ Pop(eax);
2308        __ SmiUntag(eax);
2309      }
2310      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2311      __ bind(&convert_receiver);
2312    }
2313    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
2314  }
2315  __ bind(&done_convert);
2316
2317  // ----------- S t a t e -------------
2318  //  -- eax : the number of arguments (not including the receiver)
2319  //  -- edx : the shared function info.
2320  //  -- edi : the function to call (checked to be a JSFunction)
2321  //  -- esi : the function context.
2322  // -----------------------------------
2323
2324  if (tail_call_mode == TailCallMode::kAllow) {
2325    PrepareForTailCall(masm, eax, ebx, ecx, edx);
2326    // Reload shared function info.
2327    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2328  }
2329
2330  __ mov(ebx,
2331         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2332  __ SmiUntag(ebx);
2333  ParameterCount actual(eax);
2334  ParameterCount expected(ebx);
2335  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
2336                        CheckDebugStepCallWrapper());
2337  // The function is a "classConstructor", need to raise an exception.
2338  __ bind(&class_constructor);
2339  {
2340    FrameScope frame(masm, StackFrame::INTERNAL);
2341    __ push(edi);
2342    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2343  }
2344}
2345
2346
2347namespace {
2348
2349void Generate_PushBoundArguments(MacroAssembler* masm) {
2350  // ----------- S t a t e -------------
2351  //  -- eax : the number of arguments (not including the receiver)
2352  //  -- edx : new.target (only in case of [[Construct]])
2353  //  -- edi : target (checked to be a JSBoundFunction)
2354  // -----------------------------------
2355
2356  // Load [[BoundArguments]] into ecx and length of that into ebx.
2357  Label no_bound_arguments;
2358  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2359  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2360  __ SmiUntag(ebx);
2361  __ test(ebx, ebx);
2362  __ j(zero, &no_bound_arguments);
2363  {
2364    // ----------- S t a t e -------------
2365    //  -- eax : the number of arguments (not including the receiver)
2366    //  -- edx : new.target (only in case of [[Construct]])
2367    //  -- edi : target (checked to be a JSBoundFunction)
2368    //  -- ecx : the [[BoundArguments]] (implemented as FixedArray)
2369    //  -- ebx : the number of [[BoundArguments]]
2370    // -----------------------------------
2371
2372    // Reserve stack space for the [[BoundArguments]].
2373    {
2374      Label done;
2375      __ lea(ecx, Operand(ebx, times_pointer_size, 0));
2376      __ sub(esp, ecx);
2377      // Check the stack for overflow. We are not trying to catch interruptions
2378      // (i.e. debug break and preemption) here, so check the "real stack
2379      // limit".
2380      __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
2381      __ j(greater, &done, Label::kNear);  // Signed comparison.
2382      // Restore the stack pointer.
2383      __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
2384      {
2385        FrameScope scope(masm, StackFrame::MANUAL);
2386        __ EnterFrame(StackFrame::INTERNAL);
2387        __ CallRuntime(Runtime::kThrowStackOverflow);
2388      }
2389      __ bind(&done);
2390    }
2391
2392    // Adjust effective number of arguments to include return address.
2393    __ inc(eax);
2394
2395    // Relocate arguments and return address down the stack.
2396    {
2397      Label loop;
2398      __ Set(ecx, 0);
2399      __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
2400      __ bind(&loop);
2401      __ fld_s(Operand(ebx, ecx, times_pointer_size, 0));
2402      __ fstp_s(Operand(esp, ecx, times_pointer_size, 0));
2403      __ inc(ecx);
2404      __ cmp(ecx, eax);
2405      __ j(less, &loop);
2406    }
2407
2408    // Copy [[BoundArguments]] to the stack (below the arguments).
2409    {
2410      Label loop;
2411      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2412      __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
2413      __ SmiUntag(ebx);
2414      __ bind(&loop);
2415      __ dec(ebx);
2416      __ fld_s(
2417          FieldOperand(ecx, ebx, times_pointer_size, FixedArray::kHeaderSize));
2418      __ fstp_s(Operand(esp, eax, times_pointer_size, 0));
2419      __ lea(eax, Operand(eax, 1));
2420      __ j(greater, &loop);
2421    }
2422
2423    // Adjust effective number of arguments (eax contains the number of
2424    // arguments from the call plus return address plus the number of
2425    // [[BoundArguments]]), so we need to subtract one for the return address.
2426    __ dec(eax);
2427  }
2428  __ bind(&no_bound_arguments);
2429}
2430
2431}  // namespace
2432
2433
2434// static
2435void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2436                                              TailCallMode tail_call_mode) {
2437  // ----------- S t a t e -------------
2438  //  -- eax : the number of arguments (not including the receiver)
2439  //  -- edi : the function to call (checked to be a JSBoundFunction)
2440  // -----------------------------------
2441  __ AssertBoundFunction(edi);
2442
2443  if (tail_call_mode == TailCallMode::kAllow) {
2444    PrepareForTailCall(masm, eax, ebx, ecx, edx);
2445  }
2446
2447  // Patch the receiver to [[BoundThis]].
2448  __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2449  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
2450
2451  // Push the [[BoundArguments]] onto the stack.
2452  Generate_PushBoundArguments(masm);
2453
2454  // Call the [[BoundTargetFunction]] via the Call builtin.
2455  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2456  __ mov(ecx, Operand::StaticVariable(ExternalReference(
2457                  Builtins::kCall_ReceiverIsAny, masm->isolate())));
2458  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2459  __ jmp(ecx);
2460}
2461
2462
2463// static
2464void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2465                             TailCallMode tail_call_mode) {
2466  // ----------- S t a t e -------------
2467  //  -- eax : the number of arguments (not including the receiver)
2468  //  -- edi : the target to call (can be any Object).
2469  // -----------------------------------
2470
2471  Label non_callable, non_function, non_smi;
2472  __ JumpIfSmi(edi, &non_callable);
2473  __ bind(&non_smi);
2474  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2475  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2476       RelocInfo::CODE_TARGET);
2477  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2478  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2479       RelocInfo::CODE_TARGET);
2480
2481  // Check if target has a [[Call]] internal method.
2482  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2483            Immediate(1 << Map::kIsCallable));
2484  __ j(zero, &non_callable);
2485
2486  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2487  __ j(not_equal, &non_function);
2488
2489  // 0. Prepare for tail call if necessary.
2490  if (tail_call_mode == TailCallMode::kAllow) {
2491    PrepareForTailCall(masm, eax, ebx, ecx, edx);
2492  }
2493
2494  // 1. Runtime fallback for Proxy [[Call]].
2495  __ PopReturnAddressTo(ecx);
2496  __ Push(edi);
2497  __ PushReturnAddressFrom(ecx);
2498  // Increase the arguments size to include the pushed function and the
2499  // existing receiver on the stack.
2500  __ add(eax, Immediate(2));
2501  // Tail-call to the runtime.
2502  __ JumpToExternalReference(
2503      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2504
2505  // 2. Call to something else, which might have a [[Call]] internal method (if
2506  // not we raise an exception).
2507  __ bind(&non_function);
2508  // Overwrite the original receiver with the (original) target.
2509  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2510  // Let the "call_as_function_delegate" take care of the rest.
2511  __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2512  __ Jump(masm->isolate()->builtins()->CallFunction(
2513              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2514          RelocInfo::CODE_TARGET);
2515
2516  // 3. Call to something that is not callable.
2517  __ bind(&non_callable);
2518  {
2519    FrameScope scope(masm, StackFrame::INTERNAL);
2520    __ Push(edi);
2521    __ CallRuntime(Runtime::kThrowCalledNonCallable);
2522  }
2523}
2524
2525
2526// static
2527void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2528  // ----------- S t a t e -------------
2529  //  -- eax : the number of arguments (not including the receiver)
2530  //  -- edx : the new target (checked to be a constructor)
2531  //  -- edi : the constructor to call (checked to be a JSFunction)
2532  // -----------------------------------
2533  __ AssertFunction(edi);
2534
2535  // Calling convention for function specific ConstructStubs require
2536  // ebx to contain either an AllocationSite or undefined.
2537  __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
2538
2539  // Tail call to the function-specific construct stub (still in the caller
2540  // context at this point).
2541  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2542  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
2543  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2544  __ jmp(ecx);
2545}
2546
2547
2548// static
2549void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2550  // ----------- S t a t e -------------
2551  //  -- eax : the number of arguments (not including the receiver)
2552  //  -- edx : the new target (checked to be a constructor)
2553  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
2554  // -----------------------------------
2555  __ AssertBoundFunction(edi);
2556
2557  // Push the [[BoundArguments]] onto the stack.
2558  Generate_PushBoundArguments(masm);
2559
2560  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2561  {
2562    Label done;
2563    __ cmp(edi, edx);
2564    __ j(not_equal, &done, Label::kNear);
2565    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2566    __ bind(&done);
2567  }
2568
2569  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2570  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2571  __ mov(ecx, Operand::StaticVariable(
2572                  ExternalReference(Builtins::kConstruct, masm->isolate())));
2573  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2574  __ jmp(ecx);
2575}
2576
2577
2578// static
2579void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2580  // ----------- S t a t e -------------
2581  //  -- eax : the number of arguments (not including the receiver)
2582  //  -- edi : the constructor to call (checked to be a JSProxy)
2583  //  -- edx : the new target (either the same as the constructor or
2584  //           the JSFunction on which new was invoked initially)
2585  // -----------------------------------
2586
2587  // Call into the Runtime for Proxy [[Construct]].
2588  __ PopReturnAddressTo(ecx);
2589  __ Push(edi);
2590  __ Push(edx);
2591  __ PushReturnAddressFrom(ecx);
2592  // Include the pushed new_target, constructor and the receiver.
2593  __ add(eax, Immediate(3));
2594  // Tail-call to the runtime.
2595  __ JumpToExternalReference(
2596      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2597}
2598
2599
2600// static
2601void Builtins::Generate_Construct(MacroAssembler* masm) {
2602  // ----------- S t a t e -------------
2603  //  -- eax : the number of arguments (not including the receiver)
2604  //  -- edx : the new target (either the same as the constructor or
2605  //           the JSFunction on which new was invoked initially)
2606  //  -- edi : the constructor to call (can be any Object)
2607  // -----------------------------------
2608
2609  // Check if target is a Smi.
2610  Label non_constructor;
2611  __ JumpIfSmi(edi, &non_constructor, Label::kNear);
2612
2613  // Dispatch based on instance type.
2614  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2615  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
2616       RelocInfo::CODE_TARGET);
2617
2618  // Check if target has a [[Construct]] internal method.
2619  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2620            Immediate(1 << Map::kIsConstructor));
2621  __ j(zero, &non_constructor, Label::kNear);
2622
2623  // Only dispatch to bound functions after checking whether they are
2624  // constructors.
2625  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2626  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
2627       RelocInfo::CODE_TARGET);
2628
2629  // Only dispatch to proxies after checking whether they are constructors.
2630  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2631  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
2632       RelocInfo::CODE_TARGET);
2633
2634  // Called Construct on an exotic Object with a [[Construct]] internal method.
2635  {
2636    // Overwrite the original receiver with the (original) target.
2637    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2638    // Let the "call_as_constructor_delegate" take care of the rest.
2639    __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2640    __ Jump(masm->isolate()->builtins()->CallFunction(),
2641            RelocInfo::CODE_TARGET);
2642  }
2643
2644  // Called Construct on an Object that doesn't have a [[Construct]] internal
2645  // method.
2646  __ bind(&non_constructor);
2647  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2648          RelocInfo::CODE_TARGET);
2649}
2650
2651// static
2652void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2653  // ----------- S t a t e -------------
2654  //  -- edx    : requested object size (untagged)
2655  //  -- esp[0] : return address
2656  // -----------------------------------
2657  __ SmiTag(edx);
2658  __ PopReturnAddressTo(ecx);
2659  __ Push(edx);
2660  __ PushReturnAddressFrom(ecx);
2661  __ Move(esi, Smi::FromInt(0));
2662  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2663}
2664
2665// static
2666void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2667  // ----------- S t a t e -------------
2668  //  -- edx    : requested object size (untagged)
2669  //  -- esp[0] : return address
2670  // -----------------------------------
2671  __ SmiTag(edx);
2672  __ PopReturnAddressTo(ecx);
2673  __ Push(edx);
2674  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2675  __ PushReturnAddressFrom(ecx);
2676  __ Move(esi, Smi::FromInt(0));
2677  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2678}
2679
2680// static
2681void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
2682  // The StringToNumber stub takes one argument in eax.
2683  __ AssertString(eax);
2684
2685  // Check if string has a cached array index.
2686  Label runtime;
2687  __ test(FieldOperand(eax, String::kHashFieldOffset),
2688          Immediate(String::kContainsCachedArrayIndexMask));
2689  __ j(not_zero, &runtime, Label::kNear);
2690  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
2691  __ IndexFromHash(eax, eax);
2692  __ Ret();
2693
2694  __ bind(&runtime);
2695  {
2696    FrameScope frame(masm, StackFrame::INTERNAL);
2697    // Push argument.
2698    __ push(eax);
2699    // We cannot use a tail call here because this builtin can also be called
2700    // from wasm.
2701    __ CallRuntime(Runtime::kStringToNumber);
2702  }
2703  __ Ret();
2704}
2705
2706// static
2707void Builtins::Generate_ToNumber(MacroAssembler* masm) {
2708  // The ToNumber stub takes one argument in eax.
2709  Label not_smi;
2710  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
2711  __ Ret();
2712  __ bind(&not_smi);
2713
2714  Label not_heap_number;
2715  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
2716  __ j(not_equal, &not_heap_number, Label::kNear);
2717  __ Ret();
2718  __ bind(&not_heap_number);
2719
2720  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
2721          RelocInfo::CODE_TARGET);
2722}
2723
2724// static
2725void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
2726  // The NonNumberToNumber stub takes one argument in eax.
2727  __ AssertNotNumber(eax);
2728
2729  Label not_string;
2730  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
2731  // eax: object
2732  // edi: object map
2733  __ j(above_equal, &not_string, Label::kNear);
2734  __ Jump(masm->isolate()->builtins()->StringToNumber(),
2735          RelocInfo::CODE_TARGET);
2736  __ bind(&not_string);
2737
2738  Label not_oddball;
2739  __ CmpInstanceType(edi, ODDBALL_TYPE);
2740  __ j(not_equal, &not_oddball, Label::kNear);
2741  __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2742  __ Ret();
2743  __ bind(&not_oddball);
2744  {
2745    FrameScope frame(masm, StackFrame::INTERNAL);
2746    // Push argument.
2747    __ push(eax);
2748    // We cannot use a tail call here because this builtin can also be called
2749    // from wasm.
2750    __ CallRuntime(Runtime::kToNumber);
2751  }
2752  __ Ret();
2753}
2754
2755void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2756  // ----------- S t a t e -------------
2757  //  -- eax : actual number of arguments
2758  //  -- ebx : expected number of arguments
2759  //  -- edx : new target (passed through to callee)
2760  //  -- edi : function (passed through to callee)
2761  // -----------------------------------
2762
2763  Label invoke, dont_adapt_arguments, stack_overflow;
2764  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
2765
2766  Label enough, too_few;
2767  __ cmp(eax, ebx);
2768  __ j(less, &too_few);
2769  __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2770  __ j(equal, &dont_adapt_arguments);
2771
2772  {  // Enough parameters: Actual >= expected.
2773    __ bind(&enough);
2774    EnterArgumentsAdaptorFrame(masm);
2775    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2776
2777    // Copy receiver and all expected arguments.
2778    const int offset = StandardFrameConstants::kCallerSPOffset;
2779    __ lea(edi, Operand(ebp, eax, times_4, offset));
2780    __ mov(eax, -1);  // account for receiver
2781
2782    Label copy;
2783    __ bind(&copy);
2784    __ inc(eax);
2785    __ push(Operand(edi, 0));
2786    __ sub(edi, Immediate(kPointerSize));
2787    __ cmp(eax, ebx);
2788    __ j(less, &copy);
2789    // eax now contains the expected number of arguments.
2790    __ jmp(&invoke);
2791  }
2792
2793  {  // Too few parameters: Actual < expected.
2794    __ bind(&too_few);
2795
2796    EnterArgumentsAdaptorFrame(masm);
2797    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
2798
2799    // Remember expected arguments in ecx.
2800    __ mov(ecx, ebx);
2801
2802    // Copy receiver and all actual arguments.
2803    const int offset = StandardFrameConstants::kCallerSPOffset;
2804    __ lea(edi, Operand(ebp, eax, times_4, offset));
2805    // ebx = expected - actual.
2806    __ sub(ebx, eax);
2807    // eax = -actual - 1
2808    __ neg(eax);
2809    __ sub(eax, Immediate(1));
2810
2811    Label copy;
2812    __ bind(&copy);
2813    __ inc(eax);
2814    __ push(Operand(edi, 0));
2815    __ sub(edi, Immediate(kPointerSize));
2816    __ test(eax, eax);
2817    __ j(not_zero, &copy);
2818
2819    // Fill remaining expected arguments with undefined values.
2820    Label fill;
2821    __ bind(&fill);
2822    __ inc(eax);
2823    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
2824    __ cmp(eax, ebx);
2825    __ j(less, &fill);
2826
2827    // Restore expected arguments.
2828    __ mov(eax, ecx);
2829  }
2830
2831  // Call the entry point.
2832  __ bind(&invoke);
2833  // Restore function pointer.
2834  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2835  // eax : expected number of arguments
2836  // edx : new target (passed through to callee)
2837  // edi : function (passed through to callee)
2838  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2839  __ call(ecx);
2840
2841  // Store offset of return address for deoptimizer.
2842  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2843
2844  // Leave frame and return.
2845  LeaveArgumentsAdaptorFrame(masm);
2846  __ ret(0);
2847
2848  // -------------------------------------------
2849  // Dont adapt arguments.
2850  // -------------------------------------------
2851  __ bind(&dont_adapt_arguments);
2852  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2853  __ jmp(ecx);
2854
2855  __ bind(&stack_overflow);
2856  {
2857    FrameScope frame(masm, StackFrame::MANUAL);
2858    __ CallRuntime(Runtime::kThrowStackOverflow);
2859    __ int3();
2860  }
2861}
2862
2863
2864static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
2865                                    Register function_template_info,
2866                                    Register scratch0, Register scratch1,
2867                                    Label* receiver_check_failed) {
2868  // If there is no signature, return the holder.
2869  __ CompareRoot(FieldOperand(function_template_info,
2870                              FunctionTemplateInfo::kSignatureOffset),
2871                 Heap::kUndefinedValueRootIndex);
2872  Label receiver_check_passed;
2873  __ j(equal, &receiver_check_passed, Label::kNear);
2874
2875  // Walk the prototype chain.
2876  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
2877  Label prototype_loop_start;
2878  __ bind(&prototype_loop_start);
2879
2880  // Get the constructor, if any.
2881  __ GetMapConstructor(scratch0, scratch0, scratch1);
2882  __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
2883  Label next_prototype;
2884  __ j(not_equal, &next_prototype, Label::kNear);
2885
2886  // Get the constructor's signature.
2887  __ mov(scratch0,
2888         FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
2889  __ mov(scratch0,
2890         FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
2891
2892  // Loop through the chain of inheriting function templates.
2893  Label function_template_loop;
2894  __ bind(&function_template_loop);
2895
2896  // If the signatures match, we have a compatible receiver.
2897  __ cmp(scratch0, FieldOperand(function_template_info,
2898                                FunctionTemplateInfo::kSignatureOffset));
2899  __ j(equal, &receiver_check_passed, Label::kNear);
2900
2901  // If the current type is not a FunctionTemplateInfo, load the next prototype
2902  // in the chain.
2903  __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
2904  __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
2905  __ j(not_equal, &next_prototype, Label::kNear);
2906
2907  // Otherwise load the parent function template and iterate.
2908  __ mov(scratch0,
2909         FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
2910  __ jmp(&function_template_loop, Label::kNear);
2911
2912  // Load the next prototype.
2913  __ bind(&next_prototype);
2914  __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
2915  __ test(FieldOperand(receiver, Map::kBitField3Offset),
2916          Immediate(Map::HasHiddenPrototype::kMask));
2917  __ j(zero, receiver_check_failed);
2918
2919  __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
2920  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
2921  // Iterate.
2922  __ jmp(&prototype_loop_start, Label::kNear);
2923
2924  __ bind(&receiver_check_passed);
2925}
2926
2927
2928void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
2929  // ----------- S t a t e -------------
2930  //  -- eax                : number of arguments (not including the receiver)
2931  //  -- edi                : callee
2932  //  -- esi                : context
2933  //  -- esp[0]             : return address
2934  //  -- esp[4]             : last argument
2935  //  -- ...
2936  //  -- esp[eax * 4]       : first argument
2937  //  -- esp[(eax + 1) * 4] : receiver
2938  // -----------------------------------
2939
2940  // Load the FunctionTemplateInfo.
2941  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2942  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
2943
2944  // Do the compatible receiver check.
2945  Label receiver_check_failed;
2946  __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
2947  __ Push(eax);
2948  CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
2949  __ Pop(eax);
2950  // Get the callback offset from the FunctionTemplateInfo, and jump to the
2951  // beginning of the code.
2952  __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
2953  __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
2954  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2955  __ jmp(edx);
2956
2957  // Compatible receiver check failed: pop return address, arguments and
2958  // receiver and throw an Illegal Invocation exception.
2959  __ bind(&receiver_check_failed);
2960  __ Pop(eax);
2961  __ PopReturnAddressTo(ebx);
2962  __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
2963  __ add(esp, eax);
2964  __ PushReturnAddressFrom(ebx);
2965  {
2966    FrameScope scope(masm, StackFrame::INTERNAL);
2967    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
2968  }
2969}
2970
2971
2972void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
2973  // Lookup the function in the JavaScript frame.
2974  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2975  {
2976    FrameScope scope(masm, StackFrame::INTERNAL);
2977    // Pass function as argument.
2978    __ push(eax);
2979    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2980  }
2981
2982  Label skip;
2983  // If the code object is null, just return to the unoptimized code.
2984  __ cmp(eax, Immediate(0));
2985  __ j(not_equal, &skip, Label::kNear);
2986  __ ret(0);
2987
2988  __ bind(&skip);
2989
2990  // Load deoptimization data from the code object.
2991  __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
2992
2993  // Load the OSR entrypoint offset from the deoptimization data.
2994  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
2995      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
2996  __ SmiUntag(ebx);
2997
2998  // Compute the target address = code_obj + header_size + osr_offset
2999  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
3000
3001  // Overwrite the return address on the stack.
3002  __ mov(Operand(esp, 0), eax);
3003
3004  // And "return" to the OSR entry point of the function.
3005  __ ret(0);
3006}
3007
3008
3009#undef __
3010}  // namespace internal
3011}  // namespace v8
3012
3013#endif  // V8_TARGET_ARCH_X87
3014