// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"


namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  const int num_extra_args = 2;
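  // The two extra arguments pushed here are the target (a1) and new.target
  // (a3).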
  __ Push(a1, a3);

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments
  //  -- a1                 : function
  //  -- cp                 : context
  //  -- ra                 : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t2 and the double value in f0.
  __ LoadRoot(t2, root_index);
  __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
  __ Addu(a3, a0, Operand(1));
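  // a3 = argc + 1, the number of stack slots (arguments plus receiver) that
  // will be dropped below when returning.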

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Subu(a0, a0, Operand(1));
    __ Branch(&done_loop, lt, a0, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ lw(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number; use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(ra, fp);
      __ Move(fp, sp);
      __ Push(cp, a1);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ Push(a0, t2, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(a0, t2, a3);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t2, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t2, f0, t0);
        __ bind(&done_restore);
      }
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      __ Pop(cp, a1);
      __ Pop(ra, fp);
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, t0);
    __ bind(&done_convert);

    // Perform the actual comparison using the Min/Max macro instructions,
    // with the accumulator value on the left hand side (f0) and the next
    // parameter value on the right hand side (f2). We then need to work out
    // which HeapNumber (or smi) the result came from.
    Label compare_nan, set_value;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(t0, t1, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
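    // Compare the raw bits of the result (at/t8) against the old accumulator
    // bits (t0/t1); if they differ, the result came from the new parameter,
    // so the tagged accumulator in t2 must be updated as well.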
    __ Move(at, t8, f0);
    __ Branch(&set_value, ne, t0, Operand(at));
    __ Branch(&set_value, ne, t1, Operand(t8));
    __ jmp(&loop);
    __ bind(&set_value);
    __ mov(t2, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t2, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
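  // Drop the arguments and the receiver (a3 == argc + 1) and return the
  // tagged accumulator value t2.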
  __ Lsa(sp, sp, a3, kPointerSizeLog2);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t2);  // In delay slot.
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. Convert first argument to number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}


// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::FromInt(0));
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}


// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ Lsa(sp, sp, a0, kPointerSizeLog2);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);

    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver,
                                           bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(cp, a2, a0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t4, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      //  -- a1: constructor function
      //  -- a3: new target
      //  -- t4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ lw(a0, MemOperand(sp));
    }

    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t4, t4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(t4, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
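    // t4 is smi-tagged, so subtracting 2 decrements the argument index by one.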
    __ Addu(t4, t4, Operand(-2));
    __ Branch(&loop, greater_equal, t4, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ lw(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ lw(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ lw(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // If the result is a Smi, the constructor of a derived class returned
  // neither undefined nor an Object, so throw.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

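  // a1 holds the smi-tagged argument count; drop the arguments and then the
  // receiver.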
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}


void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}


void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2 and t3; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2 and t3.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Set up new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ li(t1, Operand(last_step_action));
  __ lb(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_if_stepping, ge, t1, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(t1, Operand(debug_suspended_generator));
  __ lw(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- t0    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
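    // a3 holds the smi-tagged formal parameter count; push one hole per
    // parameter.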
    __ bind(&loop);
    __ Subu(a3, a3, Operand(Smi::FromInt(1)));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
         FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(a0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, t0);

    // Restore the operand stack.
    __ lw(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ lw(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ Addu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Lsa(a3, a0, a3, kPointerSizeLog2 - 1);
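    // a3 was the smi-tagged operand stack length; it now points one word past
    // the last element to restore.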
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ lw(t1, MemOperand(a0));
      __ Push(t1);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ addiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
    __ sw(t1, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Addu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, t0);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ lw(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Addu(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check that the function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ JumpIfRoot(kInterpreterBytecodeArrayRegister,
                Heap::kUndefinedValueRootIndex, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
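    // t1 is the value sp will have after allocating the register file.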
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ lw(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ Branch(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, t0, t1);
  __ Jump(t0);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ lw(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ lw(kContextRegister,
        MemOperand(fp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, t0);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(v0);

    // Push function as argument and compile for baseline.
    __ push(a1);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(v0);
  }
  __ Jump(ra);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));
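  // The loop below pushes a0 + 1 values (the arguments plus the receiver),
  // reading downward in memory from a2 until a2 reaches a3.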

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument.
  __ sll(t0, a0, kPointerSizeLog2);
  __ Subu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t1, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
  __ lw(a1, MemOperand(a1));
  __ Jump(a1);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = a0;
  Register closure = a1;
  Register new_target = a3;
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = a0;
  Register index = a2;
  __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));

  // Find literals.
  // a3  : native context
  // a2  : length / index
  // a0  : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = a3;
  __ lw(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = a1;
  Register array_pointer = t1;

  // Does the native context match?
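  // index is smi-tagged, so scale it by kPointerSizeLog2 - kSmiTagSize to
  // form a byte offset into the map.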
  __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(array_pointer, map, Operand(at));
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousContext));
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Branch(&loop_bottom, ne, temp, Operand(native_context));
  // OSR id set to none?
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ lw(temp, FieldMemOperand(array_pointer,
                              SharedFunctionInfo::kOffsetToPreviousLiterals));
  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ lw(t0, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(t0, &maybe_cleared_weakcell);
  // t0 is a pointer, therefore temp is a WeakCell pointing to a literals array.
  __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // t0 is a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ Branch(&gotta_call_runtime, eq, t0, Operand(Smi::FromInt(0)));

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ lw(t0, MemOperand(sp, 0));
  __ sw(temp, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(t0, JSFunction::kLiteralsOffset, temp, index,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = t0;
  __ lw(entry,
        FieldMemOperand(array_pointer,
                        SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
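  // entry now points at the first instruction of the cached code object.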

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);

  // Link the closure into the optimized function list.
  // t0 : code entry
  // a3 : native context
  // a1 : closure
  __ lw(t1,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ sw(closure,
        ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(t1, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
                            kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, t1);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ Subu(index, index,
          Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context-free optimized code map entry.
  __ lw(entry, FieldMemOperand(map, FixedArray::kHeaderSize +
                                        SharedFunctionInfo::kSharedCodeIndex));
  __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lw(t1, FieldMemOperand(entry, Code::kFlagsOffset));
  __ And(t1, t1, Operand(Code::KindField::kMask));
  __ srl(t1, t1, Code::KindField::kShift);
  __ Branch(&gotta_call_runtime_no_stack, eq, t1, Operand(Code::BUILTIN));
  // Yes, install the full code.
  __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, t1);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(a1);

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

1506    // Preserve registers across notification, this is important for compiled
1507    // stubs that tail call the runtime on deopts passing their parameters in
1508    // registers.
1509    __ MultiPush(kJSCallerSaved | kCalleeSaved);
1510    // Pass the function and deoptimization type to the runtime system.
1511    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1512    __ MultiPop(kJSCallerSaved | kCalleeSaved);
1513  }
1514
1515  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
1516  __ Jump(ra);  // Jump to miss handler
1517}
1518
1519
1520void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1521  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1522}
1523
1524
1525void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1526  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1527}
1528
1529
1530static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1531                                             Deoptimizer::BailoutType type) {
1532  {
1533    FrameScope scope(masm, StackFrame::INTERNAL);
1534    // Pass the function and deoptimization type to the runtime system.
1535    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1536    __ push(a0);
1537    __ CallRuntime(Runtime::kNotifyDeoptimized);
1538  }
1539
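  // Note: at this point the deoptimizer has left the bailout state on top of
  // the stack as a smi; in the TOS_REGISTER case the saved accumulator value
  // sits in the next slot up, as the loads below assume.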
  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu will emit exactly one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
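  // Note on USE_DELAY_SLOT above: on MIPS the instruction that follows a
  // branch or jump (its delay slot) executes before the transfer takes
  // effect, so the macro assembler is asked to place the next emitted
  // instruction into that slot rather than padding it with a nop.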

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2,
            Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu will emit exactly one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// Clobbers {t2, t3, t4, t5}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = t4;
  Register scratch = t5;

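  // The receiver is compatible if some object on its (hidden) prototype chain
  // was instantiated from a FunctionTemplateInfo whose signature chain
  // contains the expected signature; the two nested loops below walk the
  // prototype chain and each prototype's function template chain.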
  // If there is no signature, return the holder.
  __ lw(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ lw(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ lw(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype and iterate.
  __ bind(&next_prototype);
  __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
  __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));

  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : callee
  //  -- ra                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ Lsa(t8, sp, a0, kPointerSizeLog2);
  __ lw(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ Addu(t8, t8, Operand(kPointerSize));
  __ addu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- a0    : number of arguments
  //  -- a1    : function
  //  -- cp    : context
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
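  // A cached field is only valid while the isolate-wide date cache stamp
  // matches the stamp stored in the JSDate instance; on a mismatch the value
  // is recomputed through the C function below.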
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ lw(a1, MemOperand(a1));
      __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ lw(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(a0, ra, fp);
    __ Move(fp, sp);
    __ Push(cp, a1);
    __ Push(Smi::FromInt(0));
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
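  // For example, for receiver.apply(thisArg, argArray) a0 is 2 on entry, so
  // a1 <- receiver, a2 <- thisArg and a3 <- argArray below; when fewer
  // arguments are present, the missing values default to undefined.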
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    // Lsa() cannot be used here, as the scratch value is needed later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ lw(a1, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArg
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ Lsa(at, sp, a0, kPointerSizeLog2);
  __ lw(a1, MemOperand(at));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver).  Adjust argument count to make the original first argument
  //    the new receiver.
  // a0: actual number of arguments
  // a1: function
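  // For example, for f.call(a, b) the stack holds [b, a, f] with a0 == 2;
  // after the shift below it holds [b, a] with a1 == f, a0 == 1, and a as
  // the new receiver.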
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ Lsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Addu(a0, sp, Operand(a0));
    __ lw(a1, MemOperand(a0));  // target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArgument
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // Lsa() cannot be used here, as the scratch value is needed later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ sw(a2, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // argumentsList
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sw(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sw(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);  // Smi-tag the argument count.
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into t0.
    __ lw(t0, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ lw(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
    __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, t0);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lw(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
    // Make t0 the space we have left. The stack might already be overflowed
    // here which will cause t0 to become negative.
    __ Subu(t0, sp, t0);
    // Check if the arguments will overflow the stack.
    __ sll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, t0, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
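  // The elements are read directly out of the FixedArray backing store, hence
  // the FixedArray::kHeaderSize displacement in the loads below, which skips
  // the map and length fields.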
  {
    __ mov(t0, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, t0, Operand(a2));
    __ Lsa(at, a0, t0, kPointerSizeLog2);
    __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Addu(t0, t0, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, t0);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ li(at, Operand(is_tail_call_elimination_enabled));
  __ lb(scratch1, MemOperand(at));
  __ Branch(&done, eq, scratch1, Operand(zero_reg));
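  // The flag is read as a single byte through the external reference above;
  // zero means ES2015 tail call elimination is disabled, in which case this
  // turns into a regular call.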

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ lw(scratch3,
          MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Branch(&no_interpreter_frame, ne, scratch3,
              Operand(Smi::FromInt(StackFrame::STUB)));
    __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(scratch3,
        MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&no_arguments_adaptor, ne, scratch3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ lw(caller_args_count_reg,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ Branch(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count.
  __ lw(scratch1,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ lw(scratch1,
        FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(caller_args_count_reg,
        FieldMemOperand(scratch1,
                        SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
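  // Strict-mode and native functions take the receiver as-is; only sloppy
  // mode requires boxing it into an object (ES6 9.2.1.2, OrdinaryCallBindThis).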
  Label done_convert;
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Lsa(at, sp, a0, kPointerSizeLog2);
      __ lw(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, t0, t0);
      __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      }
      __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);  // Un-tag.
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // Patch the receiver to [[BoundThis]].
  {
    __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Lsa(t0, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t0));
  }

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }
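  // The stack now holds, starting at sp: the original arguments, then the
  // [[BoundArguments]], then the receiver ([[BoundThis]]); a0 has been bumped
  // by the number of bound arguments along the way.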

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

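  // Dispatch order: JSFunction and JSBoundFunction each get a dedicated
  // builtin; everything else must at least be callable, with JSProxy handed
  // to the runtime and the rest routed through the call-as-function delegate.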
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, a0, t0, t1, t2);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Addu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Lsa(at, sp, a0, kPointerSizeLog2);
  __ sw(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // The calling convention for function-specific ConstructStubs requires
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
  __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- t0 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(at, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(at, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(at, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }
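  // (If new.target was the bound function itself, it is replaced above so
  // that the target function never observes its own bound wrapper as
  // new.target; a user-supplied new.target passes through unchanged.)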

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
  __ Move(cp, Smi::FromInt(0));  // Clear the context; it is not needed here.
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::FromInt(0));  // Clear the context; it is not needed here.
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in a0.
  __ AssertString(a0);

  // Check if string has a cached array index.
  Label runtime;
  __ lw(a2, FieldMemOperand(a0, String::kHashFieldOffset));
  __ And(at, a2, Operand(String::kContainsCachedArrayIndexMask));
  __ Branch(&runtime, ne, at, Operand(zero_reg));
  __ IndexFromHash(a2, v0);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

// static
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in a0.
  Label not_smi;
  __ JumpIfNotSmi(a0, &not_smi);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.
  __ bind(&not_smi);

  Label not_heap_number;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // In delay slot.
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in a0.
  __ AssertNotNumber(a0);

  Label not_string;
  __ GetObjectType(a0, a1, a1);
  // a0: receiver
  // a1: receiver instance type
  __ Branch(&not_string, hs, a1, Operand(FIRST_NONSTRING_TYPE));
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

  Label not_oddball;
  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(a0, Oddball::kToNumberOffset));  // In delay slot.
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ Push(a0);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------
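  // The adaptor frame bridges any mismatch between a0 (actual) and a2
  // (expected): surplus actual arguments are simply copied through, while
  // missing ones are filled in with undefined.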

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless, as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t1.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t1: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));  // In the delay slot.
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(t0);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(t0);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS