1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_S390
6
7#include "src/codegen.h"
8#include "src/debug/debug.h"
9#include "src/deoptimizer.h"
10#include "src/full-codegen/full-codegen.h"
11#include "src/runtime/runtime.h"
12
13namespace v8 {
14namespace internal {
15
16#define __ ACCESS_MASM(masm)
17
18void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
19                                ExitFrameType exit_frame_type) {
20  // ----------- S t a t e -------------
21  //  -- r2                 : number of arguments excluding receiver
22  //  -- r3                 : target
23  //  -- r5                 : new.target
24  //  -- sp[0]              : last argument
25  //  -- ...
26  //  -- sp[4 * (argc - 1)] : first argument
27  //  -- sp[4 * argc]       : receiver
28  // -----------------------------------
29  __ AssertFunction(r3);
30
31  // Make sure we operate in the context of the called function (for example
32  // ConstructStubs implemented in C++ will be run in the context of the caller
33  // instead of the callee, due to the way that [[Construct]] is defined for
34  // ordinary functions).
35  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
36
37  // JumpToExternalReference expects r2 to contain the number of arguments
38  // including the receiver and the extra arguments.
39  const int num_extra_args = 3;
40  __ AddP(r2, r2, Operand(num_extra_args + 1));
41
42  // Insert extra arguments.
43  __ SmiTag(r2);
44  __ Push(r2, r3, r5);
45  __ SmiUntag(r2);
46
47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
48                             exit_frame_type == BUILTIN_EXIT);
49}
50
51// Load the built-in InternalArray function from the current context.
52static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
53                                              Register result) {
54  // Load the InternalArray function from the current native context.
55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
56}
57
58// Load the built-in Array function from the current context.
59static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
60  // Load the Array function from the current native context.
61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
62}
63
64void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
65  // ----------- S t a t e -------------
66  //  -- r2     : number of arguments
67  //  -- lr     : return address
68  //  -- sp[...]: constructor arguments
69  // -----------------------------------
70  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
71
72  // Get the InternalArray function.
73  GenerateLoadInternalArrayFunction(masm, r3);
74
75  if (FLAG_debug_code) {
76    // Initial map for the builtin InternalArray functions should be maps.
77    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
78    __ TestIfSmi(r4);
79    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
80    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
81    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
82  }
83
84  // Run the native code for the InternalArray function called as a normal
85  // function.
86  // tail call a stub
87  InternalArrayConstructorStub stub(masm->isolate());
88  __ TailCallStub(&stub);
89}
90
91void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
92  // ----------- S t a t e -------------
93  //  -- r2     : number of arguments
94  //  -- lr     : return address
95  //  -- sp[...]: constructor arguments
96  // -----------------------------------
97  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
98
99  // Get the Array function.
100  GenerateLoadArrayFunction(masm, r3);
101
102  if (FLAG_debug_code) {
103    // Initial map for the builtin Array functions should be maps.
104    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
105    __ TestIfSmi(r4);
106    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
107    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
108    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
109  }
110
111  __ LoadRR(r5, r3);
112  // Run the native code for the Array function called as a normal function.
113  // tail call a stub
114  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
115  ArrayConstructorStub stub(masm->isolate());
116  __ TailCallStub(&stub);
117}
118
119// static
120void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
121  // ----------- S t a t e -------------
122  //  -- r2                     : number of arguments
123  //  -- r3                     : function
124  //  -- cp                     : context
125  //  -- lr                     : return address
126  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
127  //  -- sp[argc * 4]           : receiver
128  // -----------------------------------
129  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
130  Heap::RootListIndex const root_index =
131      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
132                                     : Heap::kMinusInfinityValueRootIndex;
133  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
134
135  // Load the accumulator with the default return value (either -Infinity or
136  // +Infinity), with the tagged value in r7 and the double value in d1.
137  __ LoadRoot(r7, root_index);
138  __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
139
140  // Setup state for loop
141  // r4: address of arg[0] + kPointerSize
142  // r5: number of slots to drop at exit (arguments + receiver)
143  __ AddP(r6, r2, Operand(1));
144
145  Label done_loop, loop;
146  __ LoadRR(r6, r2);
147  __ bind(&loop);
148  {
149    // Check if all parameters done.
150    __ SubP(r6, Operand(1));
151    __ blt(&done_loop);
152
153    // Load the next parameter tagged value into r2.
154    __ ShiftLeftP(r1, r6, Operand(kPointerSizeLog2));
155    __ LoadP(r4, MemOperand(sp, r1));
156
157    // Load the double value of the parameter into d2, maybe converting the
158    // parameter to a number first using the ToNumber builtin if necessary.
159    Label convert, convert_smi, convert_number, done_convert;
160    __ bind(&convert);
161    __ JumpIfSmi(r4, &convert_smi);
162    __ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
163    __ JumpIfRoot(r5, Heap::kHeapNumberMapRootIndex, &convert_number);
164    {
165      // Parameter is not a Number, use the ToNumber builtin to convert it.
166      DCHECK(!FLAG_enable_embedded_constant_pool);
167      FrameScope scope(masm, StackFrame::MANUAL);
168      __ SmiTag(r2);
169      __ SmiTag(r6);
170      __ EnterBuiltinFrame(cp, r3, r2);
171      __ Push(r6, r7);
172      __ LoadRR(r2, r4);
173      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
174      __ LoadRR(r4, r2);
175      __ Pop(r6, r7);
176      __ LeaveBuiltinFrame(cp, r3, r2);
177      __ SmiUntag(r6);
178      __ SmiUntag(r2);
179      {
180        // Restore the double accumulator value (d1).
181        Label done_restore;
182        __ SmiToDouble(d1, r7);
183        __ JumpIfSmi(r7, &done_restore);
184        __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
185        __ bind(&done_restore);
186      }
187    }
188    __ b(&convert);
189    __ bind(&convert_number);
190    __ LoadDouble(d2, FieldMemOperand(r4, HeapNumber::kValueOffset));
191    __ b(&done_convert);
192    __ bind(&convert_smi);
193    __ SmiToDouble(d2, r4);
194    __ bind(&done_convert);
195
196    // Perform the actual comparison with the accumulator value on the left hand
197    // side (d1) and the next parameter value on the right hand side (d2).
198    Label compare_nan, compare_swap;
199    __ cdbr(d1, d2);
200    __ bunordered(&compare_nan);
201    __ b(cond_done, &loop);
202    __ b(CommuteCondition(cond_done), &compare_swap);
203
204    // Left and right hand side are equal, check for -0 vs. +0.
205    __ TestDoubleIsMinusZero(reg, r1, r0);
206    __ bne(&loop);
207
208    // Update accumulator. Result is on the right hand side.
209    __ bind(&compare_swap);
210    __ ldr(d1, d2);
211    __ LoadRR(r7, r4);
212    __ b(&loop);
213
214    // At least one side is NaN, which means that the result will be NaN too.
215    // We still need to visit the rest of the arguments.
216    __ bind(&compare_nan);
217    __ LoadRoot(r7, Heap::kNanValueRootIndex);
218    __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
219    __ b(&loop);
220  }
221
222  __ bind(&done_loop);
223  // Drop all slots, including the receiver.
224  __ AddP(r2, Operand(1));
225  __ Drop(r2);
226  __ LoadRR(r2, r7);
227  __ Ret();
228}
229
230// static
231void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
232  // ----------- S t a t e -------------
233  //  -- r2                     : number of arguments
234  //  -- r3                     : constructor function
235  //  -- cp                     : context
236  //  -- lr                     : return address
237  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
238  //  -- sp[argc * 4]           : receiver
239  // -----------------------------------
240
241  // 1. Load the first argument into r2.
242  Label no_arguments;
243  {
244    __ LoadRR(r4, r2);  // Store argc in r4.
245    __ CmpP(r2, Operand::Zero());
246    __ beq(&no_arguments);
247    __ SubP(r2, r2, Operand(1));
248    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
249    __ LoadP(r2, MemOperand(sp, r2));
250  }
251
252  // 2a. Convert the first argument to a number.
253  {
254    FrameScope scope(masm, StackFrame::MANUAL);
255    __ SmiTag(r4);
256    __ EnterBuiltinFrame(cp, r3, r4);
257    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
258    __ LeaveBuiltinFrame(cp, r3, r4);
259    __ SmiUntag(r4);
260  }
261
262  {
263    // Drop all arguments including the receiver.
264    __ Drop(r4);
265    __ Ret(1);
266  }
267
268  // 2b. No arguments, return +0.
269  __ bind(&no_arguments);
270  __ LoadSmiLiteral(r2, Smi::kZero);
271  __ Ret(1);
272}
273
274// static
275void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
276  // ----------- S t a t e -------------
277  //  -- r2                     : number of arguments
278  //  -- r3                     : constructor function
279  //  -- r5                     : new target
280  //  -- lr                     : return address
281  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
282  //  -- sp[argc * 4]           : receiver
283  // -----------------------------------
284
285  // 1. Make sure we operate in the context of the called function.
286  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
287
288  // 2. Load the first argument into r4.
289  {
290    Label no_arguments, done;
291    __ LoadRR(r8, r2);  // Store argc in r8.
292    __ CmpP(r2, Operand::Zero());
293    __ beq(&no_arguments);
294    __ SubP(r2, r2, Operand(1));
295    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
296    __ LoadP(r4, MemOperand(sp, r4));
297    __ b(&done);
298    __ bind(&no_arguments);
299    __ LoadSmiLiteral(r4, Smi::kZero);
300    __ bind(&done);
301  }
302
303  // 3. Make sure r4 is a number.
304  {
305    Label done_convert;
306    __ JumpIfSmi(r4, &done_convert);
307    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
308    __ beq(&done_convert);
309    {
310      FrameScope scope(masm, StackFrame::MANUAL);
311      __ SmiTag(r8);
312      __ EnterBuiltinFrame(cp, r3, r8);
313      __ Push(r5);
314      __ LoadRR(r2, r4);
315      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
316      __ LoadRR(r4, r2);
317      __ Pop(r5);
318      __ LeaveBuiltinFrame(cp, r3, r8);
319      __ SmiUntag(r8);
320    }
321    __ bind(&done_convert);
322  }
323
324  // 4. Check if new target and constructor differ.
325  Label drop_frame_and_ret, new_object;
326  __ CmpP(r3, r5);
327  __ bne(&new_object);
328
329  // 5. Allocate a JSValue wrapper for the number.
330  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
331  __ b(&drop_frame_and_ret);
332
333  // 6. Fallback to the runtime to create new object.
334  __ bind(&new_object);
335  {
336    FrameScope scope(masm, StackFrame::MANUAL);
337    __ SmiTag(r8);
338    __ EnterBuiltinFrame(cp, r3, r8);
339    __ Push(r4);  // first argument
340    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
341            RelocInfo::CODE_TARGET);
342    __ Pop(r4);
343    __ LeaveBuiltinFrame(cp, r3, r8);
344    __ SmiUntag(r8);
345  }
346  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
347
348  __ bind(&drop_frame_and_ret);
349  {
350    __ Drop(r8);
351    __ Ret(1);
352  }
353}
354
355// static
356void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
357  // ----------- S t a t e -------------
358  //  -- r2                     : number of arguments
359  //  -- r3                     : constructor function
360  //  -- cp                     : context
361  //  -- lr                     : return address
362  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
363  //  -- sp[argc * 4]           : receiver
364  // -----------------------------------
365  // 1. Load the first argument into r2.
366  Label no_arguments;
367  {
368    __ LoadRR(r4, r2);  // Store argc in r4
369    __ CmpP(r2, Operand::Zero());
370    __ beq(&no_arguments);
371    __ SubP(r2, r2, Operand(1));
372    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
373    __ LoadP(r2, MemOperand(sp, r2));
374  }
375
376  // 2a. At least one argument, return r2 if it's a string, otherwise
377  // dispatch to appropriate conversion.
378  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
379  {
380    __ JumpIfSmi(r2, &to_string);
381    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
382    __ CompareObjectType(r2, r5, r5, FIRST_NONSTRING_TYPE);
383    __ bgt(&to_string);
384    __ beq(&symbol_descriptive_string);
385    __ b(&drop_frame_and_ret);
386  }
387
388  // 2b. No arguments, return the empty string (and pop the receiver).
389  __ bind(&no_arguments);
390  {
391    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
392    __ Ret(1);
393  }
394
395  // 3a. Convert r2 to a string.
396  __ bind(&to_string);
397  {
398    FrameScope scope(masm, StackFrame::MANUAL);
399    __ SmiTag(r4);
400    __ EnterBuiltinFrame(cp, r3, r4);
401    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
402    __ LeaveBuiltinFrame(cp, r3, r4);
403    __ SmiUntag(r4);
404  }
405  __ b(&drop_frame_and_ret);
406  // 3b. Convert symbol in r2 to a string.
407  __ bind(&symbol_descriptive_string);
408  {
409    __ Drop(r4);
410    __ Drop(1);
411    __ Push(r2);
412    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
413  }
414
415  __ bind(&drop_frame_and_ret);
416  {
417    __ Drop(r4);
418    __ Ret(1);
419  }
420}
421
422// static
423void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
424  // ----------- S t a t e -------------
425  //  -- r2                     : number of arguments
426  //  -- r3                     : constructor function
427  //  -- r5                     : new target
428  //  -- cp                     : context
429  //  -- lr                     : return address
430  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
431  //  -- sp[argc * 4]           : receiver
432  // -----------------------------------
433
434  // 1. Make sure we operate in the context of the called function.
435  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
436
437  // 2. Load the first argument into r4.
438  {
439    Label no_arguments, done;
440    __ LoadRR(r8, r2);  // Store argc in r8.
441    __ CmpP(r2, Operand::Zero());
442    __ beq(&no_arguments);
443    __ SubP(r2, r2, Operand(1));
444    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
445    __ LoadP(r4, MemOperand(sp, r4));
446    __ b(&done);
447    __ bind(&no_arguments);
448    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
449    __ bind(&done);
450  }
451
452  // 3. Make sure r4 is a string.
453  {
454    Label convert, done_convert;
455    __ JumpIfSmi(r4, &convert);
456    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
457    __ blt(&done_convert);
458    __ bind(&convert);
459    {
460      FrameScope scope(masm, StackFrame::MANUAL);
461      __ SmiTag(r8);
462      __ EnterBuiltinFrame(cp, r3, r8);
463      __ Push(r5);
464      __ LoadRR(r2, r4);
465      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
466      __ LoadRR(r4, r2);
467      __ Pop(r5);
468      __ LeaveBuiltinFrame(cp, r3, r8);
469      __ SmiUntag(r8);
470    }
471    __ bind(&done_convert);
472  }
473
474  // 4. Check if new target and constructor differ.
475  Label drop_frame_and_ret, new_object;
476  __ CmpP(r3, r5);
477  __ bne(&new_object);
478
479  // 5. Allocate a JSValue wrapper for the string.
480  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
481  __ b(&drop_frame_and_ret);
482
483  // 6. Fallback to the runtime to create new object.
484  __ bind(&new_object);
485  {
486    FrameScope scope(masm, StackFrame::MANUAL);
487    __ SmiTag(r8);
488    __ EnterBuiltinFrame(cp, r3, r8);
489    __ Push(r4);  // first argument
490    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
491            RelocInfo::CODE_TARGET);
492    __ Pop(r4);
493    __ LeaveBuiltinFrame(cp, r3, r8);
494    __ SmiUntag(r8);
495  }
496  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);
497
498  __ bind(&drop_frame_and_ret);
499  {
500    __ Drop(r8);
501    __ Ret(1);
502  }
503}
504
505static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
506  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
507  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
508  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
509  __ JumpToJSEntry(ip);
510}
511
512static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
513                                           Runtime::FunctionId function_id) {
514  // ----------- S t a t e -------------
515  //  -- r2 : argument count (preserved for callee)
516  //  -- r3 : target function (preserved for callee)
517  //  -- r5 : new target (preserved for callee)
518  // -----------------------------------
519  {
520    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
521    // Push the number of arguments to the callee.
522    // Push a copy of the target function and the new target.
523    // Push function as parameter to the runtime call.
524    __ SmiTag(r2);
525    __ Push(r2, r3, r5, r3);
526
527    __ CallRuntime(function_id, 1);
528    __ LoadRR(r4, r2);
529
530    // Restore target function and new target.
531    __ Pop(r2, r3, r5);
532    __ SmiUntag(r2);
533  }
534  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
535  __ JumpToJSEntry(ip);
536}
537
538void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
539  // Checking whether the queued function is ready for install is optional,
540  // since we come across interrupts and stack checks elsewhere.  However,
541  // not checking may delay installing ready functions, and always checking
542  // would be quite expensive.  A good compromise is to first check against
543  // stack limit as a cue for an interrupt signal.
544  Label ok;
545  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
546  __ bge(&ok, Label::kNear);
547
548  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
549
550  __ bind(&ok);
551  GenerateTailCallToSharedCode(masm);
552}
553
554namespace {
555
556void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
557                                    bool create_implicit_receiver,
558                                    bool check_derived_construct) {
559  Label post_instantiation_deopt_entry;
560  // ----------- S t a t e -------------
561  //  -- r2     : number of arguments
562  //  -- r3     : constructor function
563  //  -- r5     : new target
564  //  -- cp     : context
565  //  -- lr     : return address
566  //  -- sp[...]: constructor arguments
567  // -----------------------------------
568
569  Isolate* isolate = masm->isolate();
570
571  // Enter a construct frame.
572  {
573    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
574
575    // Preserve the incoming parameters on the stack.
576
577    if (!create_implicit_receiver) {
578      __ SmiTag(r6, r2);
579      __ LoadAndTestP(r6, r6);
580      __ Push(cp, r6);
581      __ PushRoot(Heap::kTheHoleValueRootIndex);
582    } else {
583      __ SmiTag(r2);
584      __ Push(cp, r2);
585
586      // Allocate the new receiver object.
587      __ Push(r3, r5);
588      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
589              RelocInfo::CODE_TARGET);
590      __ LoadRR(r6, r2);
591      __ Pop(r3, r5);
592
593      // ----------- S t a t e -------------
594      //  -- r3: constructor function
595      //  -- r5: new target
596      //  -- r6: newly allocated object
597      // -----------------------------------
598
599      // Retrieve smi-tagged arguments count from the stack.
600      __ LoadP(r2, MemOperand(sp));
601      __ SmiUntag(r2);
602      __ LoadAndTestP(r2, r2);
603
604      // Push the allocated receiver to the stack. We need two copies
605      // because we may have to return the original one and the calling
606      // conventions dictate that the called function pops the receiver.
607      __ Push(r6, r6);
608    }
609
610    // Deoptimizer re-enters stub code here.
611    __ bind(&post_instantiation_deopt_entry);
612
613    // Set up pointer to last argument.
614    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
615
616    // Copy arguments and receiver to the expression stack.
617    // r2: number of arguments
618    // r3: constructor function
619    // r4: address of last argument (caller sp)
620    // r5: new target
621    // cr0: condition indicating whether r2 is zero
622    // sp[0]: receiver
623    // sp[1]: receiver
624    // sp[2]: number of arguments (smi-tagged)
625    Label loop, no_args;
626    __ beq(&no_args);
627    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
628    __ SubP(sp, sp, ip);
629    __ LoadRR(r1, r2);
630    __ bind(&loop);
631    __ lay(ip, MemOperand(ip, -kPointerSize));
632    __ LoadP(r0, MemOperand(ip, r4));
633    __ StoreP(r0, MemOperand(ip, sp));
634    __ BranchOnCount(r1, &loop);
635    __ bind(&no_args);
636
637    // Call the function.
638    // r2: number of arguments
639    // r3: constructor function
640    // r5: new target
641
642    ParameterCount actual(r2);
643    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
644                      CheckDebugStepCallWrapper());
645
646    // Store offset of return address for deoptimizer.
647    if (create_implicit_receiver && !is_api_function) {
648      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
649          masm->pc_offset());
650    }
651
652    // Restore context from the frame.
653    // r2: result
654    // sp[0]: receiver
655    // sp[1]: number of arguments (smi-tagged)
656    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
657
658    if (create_implicit_receiver) {
659      // If the result is an object (in the ECMA sense), we should get rid
660      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
661      // on page 74.
662      Label use_receiver, exit;
663
664      // If the result is a smi, it is *not* an object in the ECMA sense.
665      // r2: result
666      // sp[0]: receiver
667      // sp[1]: new.target
668      // sp[2]: number of arguments (smi-tagged)
669      __ JumpIfSmi(r2, &use_receiver);
670
671      // If the type of the result (stored in its map) is less than
672      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
673      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
674      __ bge(&exit);
675
676      // Throw away the result of the constructor invocation and use the
677      // on-stack receiver as the result.
678      __ bind(&use_receiver);
679      __ LoadP(r2, MemOperand(sp));
680
681      // Remove receiver from the stack, remove caller arguments, and
682      // return.
683      __ bind(&exit);
684      // r2: result
685      // sp[0]: receiver (newly allocated object)
686      // sp[1]: number of arguments (smi-tagged)
687      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
688    } else {
689      __ LoadP(r3, MemOperand(sp));
690    }
691
692    // Leave construct frame.
693  }
694
695  // ES6 9.2.2. Step 13+
696  // Check that the result is not a Smi, indicating that the constructor result
697  // from a derived class is neither undefined nor an Object.
698  if (check_derived_construct) {
699    Label dont_throw;
700    __ JumpIfNotSmi(r2, &dont_throw);
701    {
702      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
703      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
704    }
705    __ bind(&dont_throw);
706  }
707
708  __ SmiToPtrArrayOffset(r3, r3);
709  __ AddP(sp, sp, r3);
710  __ AddP(sp, sp, Operand(kPointerSize));
711  if (create_implicit_receiver) {
712    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
713  }
714  __ Ret();
715
716  // Store offset of trampoline address for deoptimizer. This is the bailout
717  // point after the receiver instantiation but before the function invocation.
718  // We need to restore some registers in order to continue the above code.
719  if (create_implicit_receiver && !is_api_function) {
720    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
721        masm->pc_offset());
722
723    // ----------- S t a t e -------------
724    //  -- r2    : newly allocated object
725    //  -- sp[0] : constructor function
726    // -----------------------------------
727
728    __ pop(r3);
729    __ Push(r2, r2);
730
731    // Retrieve smi-tagged arguments count from the stack.
732    __ LoadP(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
733    __ SmiUntag(r2);
734
735    // Retrieve the new target value from the stack. This was placed into the
736    // frame description in place of the receiver by the optimizing compiler.
737    __ la(r5, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
738    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
739    __ LoadP(r5, MemOperand(r5, ip));
740
741    // Continue with constructor function invocation.
742    __ b(&post_instantiation_deopt_entry);
743  }
744}
745
746}  // namespace
747
748void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
749  Generate_JSConstructStubHelper(masm, false, true, false);
750}
751
752void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
753  Generate_JSConstructStubHelper(masm, true, false, false);
754}
755
756void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
757  Generate_JSConstructStubHelper(masm, false, false, false);
758}
759
760void Builtins::Generate_JSBuiltinsConstructStubForDerived(
761    MacroAssembler* masm) {
762  Generate_JSConstructStubHelper(masm, false, false, true);
763}
764
765// static
766void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
767  // ----------- S t a t e -------------
768  //  -- r2 : the value to pass to the generator
769  //  -- r3 : the JSGeneratorObject to resume
770  //  -- r4 : the resume mode (tagged)
771  //  -- lr : return address
772  // -----------------------------------
773  __ AssertGeneratorObject(r3);
774
775  // Store input value into generator object.
776  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
777            r0);
778  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
779                      kLRHasNotBeenSaved, kDontSaveFPRegs);
780
781  // Store resume mode into generator object.
782  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));
783
784  // Load suspended function and context.
785  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
786  __ LoadP(cp, FieldMemOperand(r6, JSFunction::kContextOffset));
787
788  // Flood function if we are stepping.
789  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
790  Label stepping_prepared;
791  ExternalReference debug_hook =
792      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
793  __ mov(ip, Operand(debug_hook));
794  __ LoadB(ip, MemOperand(ip));
795  __ CmpSmiLiteral(ip, Smi::kZero, r0);
796  __ bne(&prepare_step_in_if_stepping);
797
798  // Flood function if we need to continue stepping in the suspended generator.
799
800  ExternalReference debug_suspended_generator =
801      ExternalReference::debug_suspended_generator_address(masm->isolate());
802
803  __ mov(ip, Operand(debug_suspended_generator));
804  __ LoadP(ip, MemOperand(ip));
805  __ CmpP(ip, r3);
806  __ beq(&prepare_step_in_suspended_generator);
807  __ bind(&stepping_prepared);
808
809  // Push receiver.
810  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
811  __ Push(ip);
812
813  // ----------- S t a t e -------------
814  //  -- r3    : the JSGeneratorObject to resume
815  //  -- r4    : the resume mode (tagged)
816  //  -- r6    : generator function
817  //  -- cp    : generator context
818  //  -- lr    : return address
819  //  -- sp[0] : generator receiver
820  // -----------------------------------
821
822  // Push holes for arguments to generator function. Since the parser forced
823  // context allocation for any variables in generators, the actual argument
824  // values have already been copied into the context and these dummy values
825  // will never be used.
826  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
827  __ LoadW(
828      r2, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
829  {
830    Label loop, done_loop;
831    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
832#if V8_TARGET_ARCH_S390X
833    __ CmpP(r2, Operand::Zero());
834    __ beq(&done_loop);
835#else
836    __ SmiUntag(r2);
837    __ LoadAndTestP(r2, r2);
838    __ beq(&done_loop);
839#endif
840    __ LoadRR(r1, r2);
841    __ bind(&loop);
842    __ push(ip);
843    __ BranchOnCount(r1, &loop);
844    __ bind(&done_loop);
845  }
846
847  // Underlying function needs to have bytecode available.
848  if (FLAG_debug_code) {
849    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
850    __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
851    __ Assert(eq, kMissingBytecodeArray);
852  }
853
854  // Resume (Ignition/TurboFan) generator object.
855  {
856    // We abuse new.target both to indicate that this is a resume call and to
857    // pass in the generator object.  In ordinary calls, new.target is always
858    // undefined because generator functions are non-constructable.
859    __ LoadRR(r5, r3);
860    __ LoadRR(r3, r6);
861    __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
862    __ JumpToJSEntry(ip);
863  }
864
865  __ bind(&prepare_step_in_if_stepping);
866  {
867    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
868    __ Push(r3, r4, r6);
869    __ CallRuntime(Runtime::kDebugOnFunctionCall);
870    __ Pop(r3, r4);
871    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
872  }
873  __ b(&stepping_prepared);
874
875  __ bind(&prepare_step_in_suspended_generator);
876  {
877    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
878    __ Push(r3, r4);
879    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
880    __ Pop(r3, r4);
881    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
882  }
883  __ b(&stepping_prepared);
884}
885
886void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
887  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
888  __ push(r3);
889  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
890}
891
892enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
893
894// Clobbers r4; preserves all other registers.
895static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
896                                        IsTagged argc_is_tagged) {
897  // Check the stack for overflow. We are not trying to catch
898  // interruptions (e.g. debug break and preemption) here, so the "real stack
899  // limit" is checked.
900  Label okay;
901  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
902  // Make r4 the space we have left. The stack might already be overflowed
903  // here which will cause r4 to become negative.
904  __ SubP(r4, sp, r4);
905  // Check if the arguments will overflow the stack.
906  if (argc_is_tagged == kArgcIsSmiTagged) {
907    __ SmiToPtrArrayOffset(r0, argc);
908  } else {
909    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
910    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
911  }
912  __ CmpP(r4, r0);
913  __ bgt(&okay);  // Signed comparison.
914
915  // Out of stack space.
916  __ CallRuntime(Runtime::kThrowStackOverflow);
917
918  __ bind(&okay);
919}
920
921static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
922                                             bool is_construct) {
923  // Called from Generate_JS_Entry
924  // r2: new.target
925  // r3: function
926  // r4: receiver
927  // r5: argc
928  // r6: argv
929  // r0,r7-r9, cp may be clobbered
930  ProfileEntryHookStub::MaybeCallEntryHook(masm);
931
932  // Enter an internal frame.
933  {
934    // FrameScope ends up calling MacroAssembler::EnterFrame here
935    FrameScope scope(masm, StackFrame::INTERNAL);
936
937    // Setup the context (we need to use the caller context from the isolate).
938    ExternalReference context_address(Isolate::kContextAddress,
939                                      masm->isolate());
940    __ mov(cp, Operand(context_address));
941    __ LoadP(cp, MemOperand(cp));
942
943    __ InitializeRootRegister();
944
945    // Push the function and the receiver onto the stack.
946    __ Push(r3, r4);
947
948    // Check if we have enough stack space to push all arguments.
949    // Clobbers r4.
950    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);
951
952    // Copy arguments to the stack in a loop from argv to sp.
953    // The arguments are actually placed in reverse order on sp
954    // compared to argv (i.e. arg1 is highest memory in sp).
955    // r3: function
956    // r5: argc
957    // r6: argv, i.e. points to first arg
958    // r7: scratch reg to hold scaled argc
959    // r8: scratch reg to hold arg handle
960    // r9: scratch reg to hold index into argv
961    Label argLoop, argExit;
962    intptr_t zero = 0;
963    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
964    __ SubRR(sp, r7);                // Buy the stack frame to fit args
965    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
966    __ bind(&argLoop);
967    __ CmpPH(r7, Operand(zero));
968    __ beq(&argExit, Label::kNear);
969    __ lay(r7, MemOperand(r7, -kPointerSize));
970    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
971    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
972    __ LoadP(r0, MemOperand(r8));             // dereference handle
973    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
974    __ b(&argLoop);
975    __ bind(&argExit);
976
977    // Setup new.target and argc.
978    __ LoadRR(r6, r2);
979    __ LoadRR(r2, r5);
980    __ LoadRR(r5, r6);
981
982    // Initialize all JavaScript callee-saved registers, since they will be seen
983    // by the garbage collector as part of handlers.
984    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
985    __ LoadRR(r7, r6);
986    __ LoadRR(r8, r6);
987    __ LoadRR(r9, r6);
988
989    // Invoke the code.
990    Handle<Code> builtin = is_construct
991                               ? masm->isolate()->builtins()->Construct()
992                               : masm->isolate()->builtins()->Call();
993    __ Call(builtin, RelocInfo::CODE_TARGET);
994
995    // Exit the JS frame and remove the parameters (except function), and
996    // return.
997  }
998  __ b(r14);
999
1000  // r2: result
1001}
1002
1003void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1004  Generate_JSEntryTrampolineHelper(masm, false);
1005}
1006
1007void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1008  Generate_JSEntryTrampolineHelper(masm, true);
1009}
1010
1011static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
1012  Register args_count = scratch;
1013
1014  // Get the arguments + receiver count.
1015  __ LoadP(args_count,
1016           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1017  __ LoadlW(args_count,
1018            FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
1019
1020  // Leave the frame (also dropping the register file).
1021  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1022
1023  __ AddP(sp, sp, args_count);
1024}
1025
1026// Generate code for entering a JS function with the interpreter.
1027// On entry to the function the receiver and arguments have been pushed on the
1028// stack left to right.  The actual argument count matches the formal parameter
1029// count expected by the function.
1030//
1031// The live registers are:
1032//   o r3: the JS function object being called.
1033//   o r5: the new target
1034//   o cp: our context
1035//   o pp: the caller's constant pool pointer (if enabled)
1036//   o fp: the caller's frame pointer
1037//   o sp: stack pointer
1038//   o lr: return address
1039//
1040// The function builds an interpreter frame.  See InterpreterFrameConstants in
1041// frames.h for its layout.
1042void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1043  ProfileEntryHookStub::MaybeCallEntryHook(masm);
1044
1045  // Open a frame scope to indicate that there is a frame on the stack.  The
1046  // MANUAL indicates that the scope shouldn't actually generate code to set up
1047  // the frame (that is done below).
1048  FrameScope frame_scope(masm, StackFrame::MANUAL);
1049  __ PushStandardFrame(r3);
1050
1051  // Get the bytecode array from the function object (or from the DebugInfo if
1052  // it is present) and load it into kInterpreterBytecodeArrayRegister.
1053  __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1054  Label array_done;
1055  Register debug_info = r4;
1056  DCHECK(!debug_info.is(r2));
1057  __ LoadP(debug_info,
1058           FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
1059  // Load original bytecode array or the debug copy.
1060  __ LoadP(kInterpreterBytecodeArrayRegister,
1061           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
1062  __ TestIfSmi(debug_info);
1063  __ beq(&array_done);
1064  __ LoadP(kInterpreterBytecodeArrayRegister,
1065           FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
1066  __ bind(&array_done);
1067
1068  // Check whether we should continue to use the interpreter.
1069  Label switch_to_different_code_kind;
1070  __ LoadP(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
1071  __ CmpP(r2, Operand(masm->CodeObject()));  // Self-reference to this code.
1072  __ bne(&switch_to_different_code_kind);
1073
1074  // Increment invocation count for the function.
1075  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kFeedbackVectorOffset));
1076  __ LoadP(r6, FieldMemOperand(r6, Cell::kValueOffset));
1077  __ LoadP(r1, FieldMemOperand(
1078                   r6, FeedbackVector::kInvocationCountIndex * kPointerSize +
1079                           FeedbackVector::kHeaderSize));
1080  __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
1081  __ StoreP(r1, FieldMemOperand(
1082                    r6, FeedbackVector::kInvocationCountIndex * kPointerSize +
1083                            FeedbackVector::kHeaderSize));
1084
1085  // Check function data field is actually a BytecodeArray object.
1086  if (FLAG_debug_code) {
1087    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
1088    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1089    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
1090                         BYTECODE_ARRAY_TYPE);
1091    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1092  }
1093
1094  // Reset code age.
1095  __ mov(r1, Operand(BytecodeArray::kNoAgeBytecodeAge));
1096  __ StoreByte(r1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1097                                   BytecodeArray::kBytecodeAgeOffset),
1098               r0);
1099
1100  // Load the initial bytecode offset.
1101  __ mov(kInterpreterBytecodeOffsetRegister,
1102         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1103
1104  // Push new.target, bytecode array and Smi tagged bytecode array offset.
1105  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
1106  __ Push(r5, kInterpreterBytecodeArrayRegister, r4);
1107
1108  // Allocate the local and temporary register file on the stack.
1109  {
1110    // Load frame size (word) from the BytecodeArray object.
1111    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1112                                  BytecodeArray::kFrameSizeOffset));
1113
1114    // Do a stack check to ensure we don't go over the limit.
1115    Label ok;
1116    __ SubP(r5, sp, r4);
1117    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
1118    __ CmpLogicalP(r5, r0);
1119    __ bge(&ok);
1120    __ CallRuntime(Runtime::kThrowStackOverflow);
1121    __ bind(&ok);
1122
1123    // If ok, push undefined as the initial value for all register file entries.
1124    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1125    Label loop, no_args;
1126    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1127    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
1128    __ LoadAndTestP(r4, r4);
1129    __ beq(&no_args);
1130    __ LoadRR(r1, r4);
1131    __ bind(&loop);
1132    __ push(r5);
1133    __ SubP(r1, Operand(1));
1134    __ bne(&loop);
1135    __ bind(&no_args);
1136  }
1137
1138  // Load accumulator and dispatch table into registers.
1139  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
1140  __ mov(kInterpreterDispatchTableRegister,
1141         Operand(ExternalReference::interpreter_dispatch_table_address(
1142             masm->isolate())));
1143
1144  // Dispatch to the first bytecode handler for the function.
1145  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
1146                           kInterpreterBytecodeOffsetRegister));
1147  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
1148  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
1149  __ Call(ip);
1150
1151  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1152
1153  // The return value is in r2.
1154  LeaveInterpreterFrame(masm, r4);
1155  __ Ret();
1156
1157  // If the shared code is no longer this entry trampoline, then the underlying
1158  // function has been switched to a different kind of code and we heal the
1159  // closure by switching the code entry field over to the new code as well.
1160  __ bind(&switch_to_different_code_kind);
1161  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1162  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1163  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
1164  __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
1165  __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0);
1166  __ RecordWriteCodeEntryField(r3, r6, r7);
1167  __ JumpToJSEntry(r6);
1168}
1169
1170static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
1171                                        Register scratch,
1172                                        Label* stack_overflow) {
1173  // Check the stack for overflow. We are not trying to catch
1174  // interruptions (e.g. debug break and preemption) here, so the "real stack
1175  // limit" is checked.
1176  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
1177  // Make scratch the space we have left. The stack might already be overflowed
1178  // here which will cause scratch to become negative.
1179  __ SubP(scratch, sp, scratch);
1180  // Check if the arguments will overflow the stack.
1181  __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2));
1182  __ CmpP(scratch, r0);
1183  __ ble(stack_overflow);  // Signed comparison.
1184}
1185
1186static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1187                                         Register num_args, Register index,
1188                                         Register count, Register scratch,
1189                                         Label* stack_overflow) {
1190  // Add a stack check before pushing arguments.
1191  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);
1192
1193  Label loop;
1194  __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU
1195  __ LoadRR(r0, count);
1196  __ bind(&loop);
1197  __ LoadP(scratch, MemOperand(index, -kPointerSize));
1198  __ lay(index, MemOperand(index, -kPointerSize));
1199  __ push(scratch);
1200  __ SubP(r0, Operand(1));
1201  __ bne(&loop);
1202}
1203
1204// static
1205void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1206    MacroAssembler* masm, TailCallMode tail_call_mode,
1207    InterpreterPushArgsMode mode) {
1208  // ----------- S t a t e -------------
1209  //  -- r2 : the number of arguments (not including the receiver)
1210  //  -- r4 : the address of the first argument to be pushed. Subsequent
1211  //          arguments should be consecutive above this, in the same order as
1212  //          they are to be pushed onto the stack.
1213  //  -- r3 : the target to call (can be any Object).
1214  // -----------------------------------
1215  Label stack_overflow;
1216
1217  // Calculate number of arguments (AddP one for receiver).
1218  __ AddP(r5, r2, Operand(1));
1219
1220  // Push the arguments.
1221  Generate_InterpreterPushArgs(masm, r5, r4, r5, r6, &stack_overflow);
1222
1223  // Call the target.
1224  if (mode == InterpreterPushArgsMode::kJSFunction) {
1225    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
1226                                                      tail_call_mode),
1227            RelocInfo::CODE_TARGET);
1228  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1229    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
1230            RelocInfo::CODE_TARGET);
1231  } else {
1232    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1233                                              tail_call_mode),
1234            RelocInfo::CODE_TARGET);
1235  }
1236
1237  __ bind(&stack_overflow);
1238  {
1239    __ TailCallRuntime(Runtime::kThrowStackOverflow);
1240    // Unreachable Code.
1241    __ bkpt(0);
1242  }
1243}
1244
1245// static
1246void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
1247    MacroAssembler* masm, InterpreterPushArgsMode mode) {
1248  // ----------- S t a t e -------------
1249  // -- r2 : argument count (not including receiver)
1250  // -- r5 : new target
1251  // -- r3 : constructor to call
1252  // -- r4 : allocation site feedback if available, undefined otherwise.
1253  // -- r6 : address of the first argument
1254  // -----------------------------------
1255  Label stack_overflow;
1256
1257  // Push a slot for the receiver to be constructed.
1258  __ LoadImmP(r0, Operand::Zero());
1259  __ push(r0);
1260
1261  // Push the arguments (skip if none).
1262  Label skip;
1263  __ CmpP(r2, Operand::Zero());
1264  __ beq(&skip);
1265  Generate_InterpreterPushArgs(masm, r2, r6, r2, r7, &stack_overflow);
1266  __ bind(&skip);
1267
1268  __ AssertUndefinedOrAllocationSite(r4, r7);
1269  if (mode == InterpreterPushArgsMode::kJSFunction) {
1270    __ AssertFunction(r3);
1271
1272    // Tail call to the function-specific construct stub (still in the caller
1273    // context at this point).
1274    __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1275    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
1276    // Jump to the construct function.
1277    __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
1278    __ Jump(ip);
1279  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1280    // Call the constructor with r2, r3, and r5 unmodified.
1281    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1282            RelocInfo::CODE_TARGET);
1283  } else {
1284    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1285    // Call the constructor with r2, r3, and r5 unmodified.
1286    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1287  }
1288
1289  __ bind(&stack_overflow);
1290  {
1291    __ TailCallRuntime(Runtime::kThrowStackOverflow);
1292    // Unreachable Code.
1293    __ bkpt(0);
1294  }
1295}
1296
1297// static
1298void Builtins::Generate_InterpreterPushArgsAndConstructArray(
1299    MacroAssembler* masm) {
1300  // ----------- S t a t e -------------
1301  // -- r2 : argument count (not including receiver)
1302  // -- r3 : target to call verified to be Array function
1303  // -- r4 : allocation site feedback if available, undefined otherwise.
1304  // -- r5 : address of the first argument
1305  // -----------------------------------
1306  Label stack_overflow;
1307
1308  __ AddP(r6, r2, Operand(1));  // Add one for receiver.
1309
1310  // Push the arguments. r6, r8, r3 will be modified.
1311  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);
1312
1313  // Array constructor expects constructor in r5. It is same as r3 here.
1314  __ LoadRR(r5, r3);
1315
1316  ArrayConstructorStub stub(masm->isolate());
1317  __ TailCallStub(&stub);
1318
1319  __ bind(&stack_overflow);
1320  {
1321    __ TailCallRuntime(Runtime::kThrowStackOverflow);
1322    // Unreachable Code.
1323    __ bkpt(0);
1324  }
1325}
1326
1327static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1328  // Set the return address to the correct point in the interpreter entry
1329  // trampoline.
1330  Smi* interpreter_entry_return_pc_offset(
1331      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1332  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1333  __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
1334  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
1335                           Code::kHeaderSize - kHeapObjectTag));
1336
1337  // Initialize the dispatch table register.
1338  __ mov(kInterpreterDispatchTableRegister,
1339         Operand(ExternalReference::interpreter_dispatch_table_address(
1340             masm->isolate())));
1341
1342  // Get the bytecode array pointer from the frame.
1343  __ LoadP(kInterpreterBytecodeArrayRegister,
1344           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1345
1346  if (FLAG_debug_code) {
1347    // Check function data field is actually a BytecodeArray object.
1348    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
1349    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1350    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
1351                         BYTECODE_ARRAY_TYPE);
1352    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1353  }
1354
1355  // Get the target bytecode offset from the frame.
1356  __ LoadP(kInterpreterBytecodeOffsetRegister,
1357           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1358  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1359
1360  // Dispatch to the target bytecode.
1361  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
1362                           kInterpreterBytecodeOffsetRegister));
1363  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
1364  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
1365  __ Jump(ip);
1366}
1367
1368void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1369  // Advance the current bytecode offset stored within the given interpreter
1370  // stack frame. This simulates what all bytecode handlers do upon completion
1371  // of the underlying operation.
1372  __ LoadP(r3, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1373  __ LoadP(r4,
1374           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1375  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1376  {
1377    FrameScope scope(masm, StackFrame::INTERNAL);
1378    __ Push(kInterpreterAccumulatorRegister, r3, r4);
1379    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1380    __ Move(r4, r2);  // Result is the new bytecode offset.
1381    __ Pop(kInterpreterAccumulatorRegister);
1382  }
1383  __ StoreP(r4,
1384            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1385
1386  Generate_InterpreterEnterBytecode(masm);
1387}
1388
1389void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1390  Generate_InterpreterEnterBytecode(masm);
1391}
1392
1393void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1394  // ----------- S t a t e -------------
1395  //  -- r2 : argument count (preserved for callee)
1396  //  -- r5 : new target (preserved for callee)
1397  //  -- r3 : target function (preserved for callee)
1398  // -----------------------------------
1399  // First lookup code, maybe we don't need to compile!
1400  Label gotta_call_runtime;
1401  Label try_shared;
1402  Label loop_top, loop_bottom;
1403
1404  Register closure = r3;
1405  Register map = r8;
1406  Register index = r4;
1407
1408  // Do we have a valid feedback vector?
1409  __ LoadP(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
1410  __ LoadP(index, FieldMemOperand(index, Cell::kValueOffset));
1411  __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
1412
1413  __ LoadP(map,
1414           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1415  __ LoadP(map,
1416           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1417  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1418  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
1419  __ blt(&try_shared);
1420
1421  // Find literals.
1422  // r9 : native context
1423  // r4  : length / index
1424  // r8  : optimized code map
1425  // r5  : new target
1426  // r3  : closure
1427  Register native_context = r9;
1428  __ LoadP(native_context, NativeContextMemOperand());
1429
1430  __ bind(&loop_top);
1431  Register temp = r1;
1432  Register array_pointer = r7;
1433
1434  // Does the native context match?
1435  __ SmiToPtrArrayOffset(array_pointer, index);
1436  __ AddP(array_pointer, map, array_pointer);
1437  __ LoadP(temp, FieldMemOperand(array_pointer,
1438                                 SharedFunctionInfo::kOffsetToPreviousContext));
1439  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1440  __ CmpP(temp, native_context);
1441  __ bne(&loop_bottom, Label::kNear);
1442
1443  // Code available?
1444  Register entry = r6;
1445  __ LoadP(entry,
1446           FieldMemOperand(array_pointer,
1447                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
1448  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1449  __ JumpIfSmi(entry, &try_shared);
1450
1451  // Found code. Get it into the closure and return.
1452  // Store code entry in the closure.
1453  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1454  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
1455  __ RecordWriteCodeEntryField(closure, entry, r7);
1456
1457  // Link the closure into the optimized function list.
1458  // r6 : code entry
1459  // r9: native context
1460  // r3 : closure
1461  __ LoadP(
1462      r7, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1463  __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
1464            r0);
1465  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp,
1466                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1467                      OMIT_SMI_CHECK);
1468  const int function_list_offset =
1469      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1470  __ StoreP(
1471      closure,
1472      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
1473  // Save closure before the write barrier.
1474  __ LoadRR(r7, closure);
1475  __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp,
1476                            kLRHasNotBeenSaved, kDontSaveFPRegs);
1477  __ JumpToJSEntry(entry);
1478
1479  __ bind(&loop_bottom);
1480  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
1481                   r0);
1482  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
1483  __ bgt(&loop_top);
1484
1485  // We found no code.
1486  __ b(&gotta_call_runtime);
1487
1488  __ bind(&try_shared);
1489  __ LoadP(entry,
1490           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1491  // Is the shared function marked for tier up?
1492  __ LoadlB(temp, FieldMemOperand(
1493                      entry, SharedFunctionInfo::kMarkedForTierUpByteOffset));
1494  __ TestBit(temp, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0);
1495  __ bne(&gotta_call_runtime);
1496
1497  // If SFI points to anything other than CompileLazy, install that.
1498  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1499  __ mov(r7, Operand(masm->CodeObject()));
1500  __ CmpP(entry, r7);
1501  __ beq(&gotta_call_runtime);
1502
1503  // Install the SFI's code entry.
1504  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1505  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
1506  __ RecordWriteCodeEntryField(closure, entry, r7);
1507  __ JumpToJSEntry(entry);
1508
1509  __ bind(&gotta_call_runtime);
1510  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1511}
1512
1513void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1514  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1515}
1516
1517void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1518  GenerateTailCallToReturnedCode(masm,
1519                                 Runtime::kCompileOptimized_NotConcurrent);
1520}
1521
1522void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1523  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1524}
1525
1526void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1527  // ----------- S t a t e -------------
1528  //  -- r2 : argument count (preserved for callee)
1529  //  -- r3 : new target (preserved for callee)
1530  //  -- r5 : target function (preserved for callee)
1531  // -----------------------------------
1532  Label failed;
1533  {
1534    FrameScope scope(masm, StackFrame::INTERNAL);
1535    // Preserve argument count for later compare.
1536    __ Move(r6, r2);
1537    // Push a copy of the target function and the new target.
1538    __ SmiTag(r2);
1539    // Push another copy as a parameter to the runtime call.
1540    __ Push(r2, r3, r5, r3);
1541
1542    // Copy arguments from caller (stdlib, foreign, heap).
1543    Label args_done;
1544    for (int j = 0; j < 4; ++j) {
1545      Label over;
1546      if (j < 3) {
1547        __ CmpP(r6, Operand(j));
1548        __ b(ne, &over);
1549      }
1550      for (int i = j - 1; i >= 0; --i) {
1551        __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1552                                        i * kPointerSize));
1553        __ push(r6);
1554      }
1555      for (int i = 0; i < 3 - j; ++i) {
1556        __ PushRoot(Heap::kUndefinedValueRootIndex);
1557      }
1558      if (j < 3) {
1559        __ jmp(&args_done);
1560        __ bind(&over);
1561      }
1562    }
1563    __ bind(&args_done);
1564
1565    // Call runtime, on success unwind frame, and parent frame.
1566    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1567    // A smi 0 is returned on failure, an object on success.
1568    __ JumpIfSmi(r2, &failed);
1569
1570    __ Drop(2);
1571    __ pop(r6);
1572    __ SmiUntag(r6);
1573    scope.GenerateLeaveFrame();
1574
1575    __ AddP(r6, r6, Operand(1));
1576    __ Drop(r6);
1577    __ Ret();
1578
1579    __ bind(&failed);
1580    // Restore target function and new target.
1581    __ Pop(r2, r3, r5);
1582    __ SmiUntag(r2);
1583  }
1584  // On failure, tail call back to regular js.
1585  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1586}
1587
1588static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1589  // For now, we are relying on the fact that make_code_young doesn't do any
1590  // garbage collection which allows us to save/restore the registers without
1591  // worrying about which of them contain pointers. We also don't build an
1592  // internal frame to make the code faster, since we shouldn't have to do stack
1593  // crawls in MakeCodeYoung. This seems a bit fragile.
1594
1595  // Point r2 at the start of the PlatformCodeAge sequence.
1596  __ CleanseP(r14);
1597  __ SubP(r14, Operand(kCodeAgingSequenceLength));
1598  __ LoadRR(r2, r14);
1599
1600  __ pop(r14);
1601
1602  // The following registers must be saved and restored when calling through to
1603  // the runtime:
1604  //   r2 - contains return address (beginning of patch sequence)
1605  //   r3 - isolate
1606  //   r5 - new target
1607  //   lr - return address
1608  FrameScope scope(masm, StackFrame::MANUAL);
1609  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1610  __ PrepareCallCFunction(2, 0, r4);
1611  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
1612  __ CallCFunction(
1613      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1614  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1615  __ LoadRR(ip, r2);
1616  __ Jump(ip);
1617}
1618
1619#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
1620  void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
1621    GenerateMakeCodeYoungAgainCommon(masm);                               \
1622  }
1623CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1624#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1625
1626void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1627  // For now, we are relying on the fact that make_code_young doesn't do any
1628  // garbage collection which allows us to save/restore the registers without
1629  // worrying about which of them contain pointers. We also don't build an
1630  // internal frame to make the code faster, since we shouldn't have to do stack
1631  // crawls in MakeCodeYoung. This seems a bit fragile.
1632
1633  // Point r2 at the start of the PlatformCodeAge sequence.
1634  __ CleanseP(r14);
1635  __ SubP(r14, Operand(kCodeAgingSequenceLength));
1636  __ LoadRR(r2, r14);
1637
1638  __ pop(r14);
1639
1640  // The following registers must be saved and restored when calling through to
1641  // the runtime:
1642  //   r2 - contains return address (beginning of patch sequence)
1643  //   r3 - isolate
1644  //   r5 - new target
1645  //   lr - return address
1646  FrameScope scope(masm, StackFrame::MANUAL);
1647  __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1648  __ PrepareCallCFunction(2, 0, r4);
1649  __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
1650  __ CallCFunction(
1651      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1652      2);
1653  __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1654  __ LoadRR(ip, r2);
1655
1656  // Perform prologue operations usually performed by the young code stub.
1657  __ PushStandardFrame(r3);
1658
1659  // Jump to point after the code-age stub.
1660  __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
1661  __ Jump(r2);
1662}
1663
1664void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1665  GenerateMakeCodeYoungAgainCommon(masm);
1666}
1667
1668void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1669  Generate_MarkCodeAsExecutedOnce(masm);
1670}
1671
1672static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1673                                             SaveFPRegsMode save_doubles) {
1674  {
1675    FrameScope scope(masm, StackFrame::INTERNAL);
1676
1677    // Preserve registers across notification, this is important for compiled
1678    // stubs that tail call the runtime on deopts passing their parameters in
1679    // registers.
1680    __ MultiPush(kJSCallerSaved | kCalleeSaved);
1681    // Pass the function and deoptimization type to the runtime system.
1682    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1683    __ MultiPop(kJSCallerSaved | kCalleeSaved);
1684  }
1685
1686  __ la(sp, MemOperand(sp, kPointerSize));  // Ignore state
1687  __ Ret();                                 // Jump to miss handler
1688}
1689
1690void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1691  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1692}
1693
1694void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1695  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1696}
1697
1698static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1699                                             Deoptimizer::BailoutType type) {
1700  {
1701    FrameScope scope(masm, StackFrame::INTERNAL);
1702    // Pass the function and deoptimization type to the runtime system.
1703    __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
1704    __ push(r2);
1705    __ CallRuntime(Runtime::kNotifyDeoptimized);
1706  }
1707
1708  // Get the full codegen state from the stack and untag it -> r8.
1709  __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
1710  __ SmiUntag(r8);
1711  // Switch on the state.
1712  Label with_tos_register, unknown_state;
1713  __ CmpP(
1714      r8,
1715      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
1716  __ bne(&with_tos_register);
1717  __ la(sp, MemOperand(sp, 1 * kPointerSize));  // Remove state.
1718  __ Ret();
1719
1720  __ bind(&with_tos_register);
1721  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
1722  __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
1723  __ CmpP(
1724      r8,
1725      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
1726  __ bne(&unknown_state);
1727  __ la(sp, MemOperand(sp, 2 * kPointerSize));  // Remove state.
1728  __ Ret();
1729
1730  __ bind(&unknown_state);
1731  __ stop("no cases left");
1732}
1733
1734void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1735  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1736}
1737
1738void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1739  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1740}
1741
1742void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1743  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1744}
1745
1746// Clobbers registers {r6, r7, r8, r9}.
1747void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1748                             Register function_template_info,
1749                             Label* receiver_check_failed) {
1750  Register signature = r6;
1751  Register map = r7;
1752  Register constructor = r8;
1753  Register scratch = r9;
1754
1755  // If there is no signature, return the holder.
1756  __ LoadP(signature, FieldMemOperand(function_template_info,
1757                                      FunctionTemplateInfo::kSignatureOffset));
1758  Label receiver_check_passed;
1759  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1760                &receiver_check_passed);
1761
1762  // Walk the prototype chain.
1763  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1764  Label prototype_loop_start;
1765  __ bind(&prototype_loop_start);
1766
1767  // Get the constructor, if any.
1768  __ GetMapConstructor(constructor, map, scratch, scratch);
1769  __ CmpP(scratch, Operand(JS_FUNCTION_TYPE));
1770  Label next_prototype;
1771  __ bne(&next_prototype);
1772  Register type = constructor;
1773  __ LoadP(type,
1774           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1775  __ LoadP(type,
1776           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1777
1778  // Loop through the chain of inheriting function templates.
1779  Label function_template_loop;
1780  __ bind(&function_template_loop);
1781
1782  // If the signatures match, we have a compatible receiver.
1783  __ CmpP(signature, type);
1784  __ beq(&receiver_check_passed);
1785
1786  // If the current type is not a FunctionTemplateInfo, load the next prototype
1787  // in the chain.
1788  __ JumpIfSmi(type, &next_prototype);
1789  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
1790  __ bne(&next_prototype);
1791
1792  // Otherwise load the parent function template and iterate.
1793  __ LoadP(type,
1794           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1795  __ b(&function_template_loop);
1796
1797  // Load the next prototype.
1798  __ bind(&next_prototype);
1799  __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1800  __ DecodeField<Map::HasHiddenPrototype>(scratch);
1801  __ beq(receiver_check_failed);
1802
1803  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1804  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1805  // Iterate.
1806  __ b(&prototype_loop_start);
1807
1808  __ bind(&receiver_check_passed);
1809}
1810
1811void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1812  // ----------- S t a t e -------------
1813  //  -- r2                 : number of arguments excluding receiver
1814  //  -- r3                 : callee
1815  //  -- lr                 : return address
1816  //  -- sp[0]              : last argument
1817  //  -- ...
1818  //  -- sp[4 * (argc - 1)] : first argument
1819  //  -- sp[4 * argc]       : receiver
1820  // -----------------------------------
1821
1822  // Load the FunctionTemplateInfo.
1823  __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1824  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
1825
1826  // Do the compatible receiver check.
1827  Label receiver_check_failed;
1828  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
1829  __ LoadP(r4, MemOperand(sp, r1));
1830  CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed);
1831
1832  // Get the callback offset from the FunctionTemplateInfo, and jump to the
1833  // beginning of the code.
1834  __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset));
1835  __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset));
1836  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
1837  __ JumpToJSEntry(ip);
1838
1839  // Compatible receiver check failed: throw an Illegal Invocation exception.
1840  __ bind(&receiver_check_failed);
1841  // Drop the arguments (including the receiver);
1842  __ AddP(r1, r1, Operand(kPointerSize));
1843  __ AddP(sp, sp, r1);
1844  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1845}
1846
1847static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1848                                              bool has_handler_frame) {
1849  // Lookup the function in the JavaScript frame.
1850  if (has_handler_frame) {
1851    __ LoadP(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1852    __ LoadP(r2, MemOperand(r2, JavaScriptFrameConstants::kFunctionOffset));
1853  } else {
1854    __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1855  }
1856
1857  {
1858    FrameScope scope(masm, StackFrame::INTERNAL);
1859    // Pass function as argument.
1860    __ push(r2);
1861    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1862  }
1863
1864  // If the code object is null, just return to the caller.
1865  Label skip;
1866  __ CmpSmiLiteral(r2, Smi::kZero, r0);
1867  __ bne(&skip);
1868  __ Ret();
1869
1870  __ bind(&skip);
1871
1872  // Drop any potential handler frame that is be sitting on top of the actual
1873  // JavaScript frame. This is the case then OSR is triggered from bytecode.
1874  if (has_handler_frame) {
1875    __ LeaveFrame(StackFrame::STUB);
1876  }
1877
1878  // Load deoptimization data from the code object.
1879  // <deopt_data> = <code>[#deoptimization_data_offset]
1880  __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));
1881
1882  // Load the OSR entrypoint offset from the deoptimization data.
1883  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1884  __ LoadP(
1885      r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
1886                                  DeoptimizationInputData::kOsrPcOffsetIndex)));
1887  __ SmiUntag(r3);
1888
1889  // Compute the target address = code_obj + header_size + osr_offset
1890  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1891  __ AddP(r2, r3);
1892  __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1893  __ LoadRR(r14, r0);
1894
1895  // And "return" to the OSR entry point of the function.
1896  __ Ret();
1897}
1898
1899void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1900  Generate_OnStackReplacementHelper(masm, false);
1901}
1902
1903void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1904  Generate_OnStackReplacementHelper(masm, true);
1905}
1906
1907// static
1908void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1909  // ----------- S t a t e -------------
1910  //  -- r2    : argc
1911  //  -- sp[0] : argArray
1912  //  -- sp[4] : thisArg
1913  //  -- sp[8] : receiver
1914  // -----------------------------------
1915
1916  // 1. Load receiver into r3, argArray into r2 (if present), remove all
1917  // arguments from the stack (including the receiver), and push thisArg (if
1918  // present) instead.
1919  {
1920    Label skip;
1921    Register arg_size = r4;
1922    Register new_sp = r5;
1923    Register scratch = r6;
1924    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
1925    __ AddP(new_sp, sp, arg_size);
1926    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1927    __ LoadRR(scratch, r2);
1928    __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
1929    __ CmpP(arg_size, Operand(kPointerSize));
1930    __ blt(&skip);
1931    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
1932    __ beq(&skip);
1933    __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
1934    __ bind(&skip);
1935    __ LoadRR(sp, new_sp);
1936    __ StoreP(scratch, MemOperand(sp, 0));
1937  }
1938
1939  // ----------- S t a t e -------------
1940  //  -- r2    : argArray
1941  //  -- r3    : receiver
1942  //  -- sp[0] : thisArg
1943  // -----------------------------------
1944
1945  // 2. Make sure the receiver is actually callable.
1946  Label receiver_not_callable;
1947  __ JumpIfSmi(r3, &receiver_not_callable);
1948  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1949  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
1950  __ TestBit(r6, Map::kIsCallable);
1951  __ beq(&receiver_not_callable);
1952
1953  // 3. Tail call with no arguments if argArray is null or undefined.
1954  Label no_arguments;
1955  __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
1956  __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);
1957
1958  // 4a. Apply the receiver to the given argArray (passing undefined for
1959  // new.target).
1960  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1961  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1962
1963  // 4b. The argArray is either null or undefined, so we tail call without any
1964  // arguments to the receiver.
1965  __ bind(&no_arguments);
1966  {
1967    __ LoadImmP(r2, Operand::Zero());
1968    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1969  }
1970
1971  // 4c. The receiver is not callable, throw an appropriate TypeError.
1972  __ bind(&receiver_not_callable);
1973  {
1974    __ StoreP(r3, MemOperand(sp, 0));
1975    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1976  }
1977}
1978
1979// static
1980void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1981  // 1. Make sure we have at least one argument.
1982  // r2: actual number of arguments
1983  {
1984    Label done;
1985    __ CmpP(r2, Operand::Zero());
1986    __ bne(&done, Label::kNear);
1987    __ PushRoot(Heap::kUndefinedValueRootIndex);
1988    __ AddP(r2, Operand(1));
1989    __ bind(&done);
1990  }
1991
1992  // r2: actual number of arguments
1993  // 2. Get the callable to call (passed as receiver) from the stack.
1994  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
1995  __ LoadP(r3, MemOperand(sp, r4));
1996
1997  // 3. Shift arguments and return address one slot down on the stack
1998  //    (overwriting the original receiver).  Adjust argument count to make
1999  //    the original first argument the new receiver.
2000  // r2: actual number of arguments
2001  // r3: callable
2002  {
2003    Label loop;
2004    // Calculate the copy start address (destination). Copy end address is sp.
2005    __ AddP(r4, sp, r4);
2006
2007    __ bind(&loop);
2008    __ LoadP(ip, MemOperand(r4, -kPointerSize));
2009    __ StoreP(ip, MemOperand(r4));
2010    __ SubP(r4, Operand(kPointerSize));
2011    __ CmpP(r4, sp);
2012    __ bne(&loop);
2013    // Adjust the actual number of arguments and remove the top element
2014    // (which is a copy of the last argument).
2015    __ SubP(r2, Operand(1));
2016    __ pop();
2017  }
2018
2019  // 4. Call the callable.
2020  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2021}
2022
2023void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2024  // ----------- S t a t e -------------
2025  //  -- r2     : argc
2026  //  -- sp[0]  : argumentsList
2027  //  -- sp[4]  : thisArgument
2028  //  -- sp[8]  : target
2029  //  -- sp[12] : receiver
2030  // -----------------------------------
2031
2032  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
2033  // remove all arguments from the stack (including the receiver), and push
2034  // thisArgument (if present) instead.
2035  {
2036    Label skip;
2037    Register arg_size = r4;
2038    Register new_sp = r5;
2039    Register scratch = r6;
2040    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
2041    __ AddP(new_sp, sp, arg_size);
2042    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2043    __ LoadRR(scratch, r3);
2044    __ LoadRR(r2, r3);
2045    __ CmpP(arg_size, Operand(kPointerSize));
2046    __ blt(&skip);
2047    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
2048    __ beq(&skip);
2049    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
2050    __ CmpP(arg_size, Operand(2 * kPointerSize));
2051    __ beq(&skip);
2052    __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
2053    __ bind(&skip);
2054    __ LoadRR(sp, new_sp);
2055    __ StoreP(scratch, MemOperand(sp, 0));
2056  }
2057
2058  // ----------- S t a t e -------------
2059  //  -- r2    : argumentsList
2060  //  -- r3    : target
2061  //  -- sp[0] : thisArgument
2062  // -----------------------------------
2063
2064  // 2. Make sure the target is actually callable.
2065  Label target_not_callable;
2066  __ JumpIfSmi(r3, &target_not_callable);
2067  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2068  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2069  __ TestBit(r6, Map::kIsCallable);
2070  __ beq(&target_not_callable);
2071
2072  // 3a. Apply the target to the given argumentsList (passing undefined for
2073  // new.target).
2074  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2075  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2076
2077  // 3b. The target is not callable, throw an appropriate TypeError.
2078  __ bind(&target_not_callable);
2079  {
2080    __ StoreP(r3, MemOperand(sp, 0));
2081    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2082  }
2083}
2084
2085void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2086  // ----------- S t a t e -------------
2087  //  -- r2     : argc
2088  //  -- sp[0]  : new.target (optional)
2089  //  -- sp[4]  : argumentsList
2090  //  -- sp[8]  : target
2091  //  -- sp[12] : receiver
2092  // -----------------------------------
2093
2094  // 1. Load target into r3 (if present), argumentsList into r2 (if present),
2095  // new.target into r5 (if present, otherwise use target), remove all
2096  // arguments from the stack (including the receiver), and push thisArgument
2097  // (if present) instead.
2098  {
2099    Label skip;
2100    Register arg_size = r4;
2101    Register new_sp = r6;
2102    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
2103    __ AddP(new_sp, sp, arg_size);
2104    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2105    __ LoadRR(r2, r3);
2106    __ LoadRR(r5, r3);
2107    __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
2108    __ CmpP(arg_size, Operand(kPointerSize));
2109    __ blt(&skip);
2110    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
2111    __ LoadRR(r5, r3);  // new.target defaults to target
2112    __ beq(&skip);
2113    __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
2114    __ CmpP(arg_size, Operand(2 * kPointerSize));
2115    __ beq(&skip);
2116    __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
2117    __ bind(&skip);
2118    __ LoadRR(sp, new_sp);
2119  }
2120
2121  // ----------- S t a t e -------------
2122  //  -- r2    : argumentsList
2123  //  -- r5    : new.target
2124  //  -- r3    : target
2125  //  -- sp[0] : receiver (undefined)
2126  // -----------------------------------
2127
2128  // 2. Make sure the target is actually a constructor.
2129  Label target_not_constructor;
2130  __ JumpIfSmi(r3, &target_not_constructor);
2131  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2132  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2133  __ TestBit(r6, Map::kIsConstructor);
2134  __ beq(&target_not_constructor);
2135
2136  // 3. Make sure the target is actually a constructor.
2137  Label new_target_not_constructor;
2138  __ JumpIfSmi(r5, &new_target_not_constructor);
2139  __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
2140  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2141  __ TestBit(r6, Map::kIsConstructor);
2142  __ beq(&new_target_not_constructor);
2143
2144  // 4a. Construct the target with the given new.target and argumentsList.
2145  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2146
2147  // 4b. The target is not a constructor, throw an appropriate TypeError.
2148  __ bind(&target_not_constructor);
2149  {
2150    __ StoreP(r3, MemOperand(sp, 0));
2151    __ TailCallRuntime(Runtime::kThrowNotConstructor);
2152  }
2153
2154  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2155  __ bind(&new_target_not_constructor);
2156  {
2157    __ StoreP(r5, MemOperand(sp, 0));
2158    __ TailCallRuntime(Runtime::kThrowNotConstructor);
2159  }
2160}
2161
2162static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2163  __ SmiTag(r2);
2164  __ Load(r6, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2165  // Stack updated as such:
2166  //    old SP --->
2167  //                 R14 Return Addr
2168  //                 Old FP                     <--- New FP
2169  //                 Argument Adapter SMI
2170  //                 Function
2171  //                 ArgC as SMI                <--- New SP
2172  __ lay(sp, MemOperand(sp, -5 * kPointerSize));
2173
2174  // Cleanse the top nibble of 31-bit pointers.
2175  __ CleanseP(r14);
2176  __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
2177  __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
2178  __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
2179  __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
2180  __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
2181  __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp +
2182                               kPointerSize));
2183}
2184
2185static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2186  // ----------- S t a t e -------------
2187  //  -- r2 : result being passed through
2188  // -----------------------------------
2189  // Get the number of arguments passed (as a smi), tear down the frame and
2190  // then tear down the parameters.
2191  __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2192                                kPointerSize)));
2193  int stack_adjustment = kPointerSize;  // adjust for receiver
2194  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
2195  __ SmiToPtrArrayOffset(r3, r3);
2196  __ lay(sp, MemOperand(sp, r3));
2197}
2198
2199// static
2200void Builtins::Generate_Apply(MacroAssembler* masm) {
2201  // ----------- S t a t e -------------
2202  //  -- r2    : argumentsList
2203  //  -- r3    : target
2204  //  -- r5    : new.target (checked to be constructor or undefined)
2205  //  -- sp[0] : thisArgument
2206  // -----------------------------------
2207
2208  // Create the list of arguments from the array-like argumentsList.
2209  {
2210    Label create_arguments, create_array, create_holey_array, create_runtime,
2211        done_create;
2212    __ JumpIfSmi(r2, &create_runtime);
2213
2214    // Load the map of argumentsList into r4.
2215    __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
2216
2217    // Load native context into r6.
2218    __ LoadP(r6, NativeContextMemOperand());
2219
2220    // Check if argumentsList is an (unmodified) arguments object.
2221    __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2222    __ CmpP(ip, r4);
2223    __ beq(&create_arguments);
2224    __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX));
2225    __ CmpP(ip, r4);
2226    __ beq(&create_arguments);
2227
2228    // Check if argumentsList is a fast JSArray.
2229    __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE);
2230    __ beq(&create_array);
2231
2232    // Ask the runtime to create the list (actually a FixedArray).
2233    __ bind(&create_runtime);
2234    {
2235      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2236      __ Push(r3, r5, r2);
2237      __ CallRuntime(Runtime::kCreateListFromArrayLike);
2238      __ Pop(r3, r5);
2239      __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
2240      __ SmiUntag(r4);
2241    }
2242    __ b(&done_create);
2243
2244    // Try to create the list from an arguments object.
2245    __ bind(&create_arguments);
2246    __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset));
2247    __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
2248    __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset));
2249    __ CmpP(r4, ip);
2250    __ bne(&create_runtime);
2251    __ SmiUntag(r4);
2252    __ LoadRR(r2, r6);
2253    __ b(&done_create);
2254
2255    // For holey JSArrays we need to check that the array prototype chain
2256    // protector is intact and our prototype is the Array.prototype actually.
2257    __ bind(&create_holey_array);
2258    __ LoadP(r4, FieldMemOperand(r4, Map::kPrototypeOffset));
2259    __ LoadP(r6, ContextMemOperand(r6, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2260    __ CmpP(r4, r6);
2261    __ bne(&create_runtime);
2262    __ LoadRoot(r6, Heap::kArrayProtectorRootIndex);
2263    __ LoadP(r4, FieldMemOperand(r6, PropertyCell::kValueOffset));
2264    __ CmpSmiLiteral(r4, Smi::FromInt(Isolate::kProtectorValid), r0);
2265    __ bne(&create_runtime);
2266    __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
2267    __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
2268    __ SmiUntag(r4);
2269    __ b(&done_create);
2270
2271    // Try to create the list from a JSArray object.
2272    // -- r4 and r6 must be preserved till bne create_holey_array.
2273    __ bind(&create_array);
2274    __ LoadlB(r7, FieldMemOperand(r4, Map::kBitField2Offset));
2275    __ DecodeField<Map::ElementsKindBits>(r7);
2276    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2277    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2278    STATIC_ASSERT(FAST_ELEMENTS == 2);
2279    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2280    __ CmpP(r7, Operand(FAST_HOLEY_ELEMENTS));
2281    __ bgt(&create_runtime);
2282    // Only FAST_XXX after this point, FAST_HOLEY_XXX are odd values.
2283    __ TestBit(r7, Map::kHasNonInstancePrototype, r0);
2284    __ bne(&create_holey_array);
2285    // FAST_SMI_ELEMENTS or FAST_ELEMENTS after this point.
2286    __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
2287    __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
2288    __ SmiUntag(r4);
2289
2290    __ bind(&done_create);
2291  }
2292
2293  // Check for stack overflow.
2294  {
2295    // Check the stack for overflow. We are not trying to catch interruptions
2296    // (i.e. debug break and preemption) here, so check the "real stack limit".
2297    Label done;
2298    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
2299    // Make ip the space we have left. The stack might already be overflowed
2300    // here which will cause ip to become negative.
2301    __ SubP(ip, sp, ip);
2302    // Check if the arguments will overflow the stack.
2303    __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
2304    __ CmpP(ip, r0);  // Signed comparison.
2305    __ bgt(&done);
2306    __ TailCallRuntime(Runtime::kThrowStackOverflow);
2307    __ bind(&done);
2308  }
2309
2310  // ----------- S t a t e -------------
2311  //  -- r3    : target
2312  //  -- r2    : args (a FixedArray built from argumentsList)
2313  //  -- r4    : len (number of elements to push from args)
2314  //  -- r5    : new.target (checked to be constructor or undefined)
2315  //  -- sp[0] : thisArgument
2316  // -----------------------------------
2317
2318  // Push arguments onto the stack (thisArgument is already on the stack).
2319  {
2320    __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
2321    Label loop, no_args, skip;
2322    __ CmpP(r4, Operand::Zero());
2323    __ beq(&no_args);
2324    __ AddP(r2, r2,
2325            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
2326    __ LoadRR(r1, r4);
2327    __ bind(&loop);
2328    __ LoadP(ip, MemOperand(r2, kPointerSize));
2329    __ la(r2, MemOperand(r2, kPointerSize));
2330    __ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
2331    __ bne(&skip, Label::kNear);
2332    __ LoadRR(ip, r8);
2333    __ bind(&skip);
2334    __ push(ip);
2335    __ BranchOnCount(r1, &loop);
2336    __ bind(&no_args);
2337    __ LoadRR(r2, r4);
2338  }
2339
2340  // Dispatch to Call or Construct depending on whether new.target is undefined.
2341  {
2342    __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
2343    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
2344    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2345  }
2346}
2347
2348// static
2349void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
2350                                           Handle<Code> code) {
2351  // ----------- S t a t e -------------
2352  //  -- r3    : the target to call (can be any Object)
2353  //  -- r4    : start index (to support rest parameters)
2354  //  -- lr    : return address.
2355  //  -- sp[0] : thisArgument
2356  // -----------------------------------
2357
2358  // Check if we have an arguments adaptor frame below the function frame.
2359  Label arguments_adaptor, arguments_done;
2360  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2361  __ LoadP(ip, MemOperand(r5, CommonFrameConstants::kContextOrFrameTypeOffset));
2362  __ CmpP(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2363  __ beq(&arguments_adaptor);
2364  {
2365    __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2366    __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
2367    __ LoadW(r2, FieldMemOperand(
2368                     r2, SharedFunctionInfo::kFormalParameterCountOffset));
2369    __ LoadRR(r5, fp);
2370  }
2371  __ b(&arguments_done);
2372  __ bind(&arguments_adaptor);
2373  {
2374    // Load the length from the ArgumentsAdaptorFrame.
2375    __ LoadP(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
2376  }
2377  __ bind(&arguments_done);
2378
2379  Label stack_empty, stack_done, stack_overflow;
2380  __ SmiUntag(r2);
2381  __ SubP(r2, r2, r4);
2382  __ CmpP(r2, Operand::Zero());
2383  __ ble(&stack_empty);
2384  {
2385    // Check for stack overflow.
2386    Generate_StackOverflowCheck(masm, r2, r4, &stack_overflow);
2387
2388    // Forward the arguments from the caller frame.
2389    {
2390      Label loop;
2391      __ AddP(r5, r5, Operand(kPointerSize));
2392      __ LoadRR(r4, r2);
2393      __ bind(&loop);
2394      {
2395        __ ShiftLeftP(ip, r4, Operand(kPointerSizeLog2));
2396        __ LoadP(ip, MemOperand(r5, ip));
2397        __ push(ip);
2398        __ SubP(r4, r4, Operand(1));
2399        __ CmpP(r4, Operand::Zero());
2400        __ bne(&loop);
2401      }
2402    }
2403  }
2404  __ b(&stack_done);
2405  __ bind(&stack_overflow);
2406  __ TailCallRuntime(Runtime::kThrowStackOverflow);
2407  __ bind(&stack_empty);
2408  {
2409    // We just pass the receiver, which is already on the stack.
2410    __ mov(r2, Operand::Zero());
2411  }
2412  __ bind(&stack_done);
2413
2414  __ Jump(code, RelocInfo::CODE_TARGET);
2415}
2416
2417namespace {
2418
2419// Drops top JavaScript frame and an arguments adaptor frame below it (if
2420// present) preserving all the arguments prepared for current call.
2421// Does nothing if debugger is currently active.
2422// ES6 14.6.3. PrepareForTailCall
2423//
2424// Stack structure for the function g() tail calling f():
2425//
2426// ------- Caller frame: -------
2427// |  ...
2428// |  g()'s arg M
2429// |  ...
2430// |  g()'s arg 1
2431// |  g()'s receiver arg
2432// |  g()'s caller pc
2433// ------- g()'s frame: -------
2434// |  g()'s caller fp      <- fp
2435// |  g()'s context
2436// |  function pointer: g
2437// |  -------------------------
2438// |  ...
2439// |  ...
2440// |  f()'s arg N
2441// |  ...
2442// |  f()'s arg 1
2443// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
2444// ----------------------
2445//
2446void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2447                        Register scratch1, Register scratch2,
2448                        Register scratch3) {
2449  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2450  Comment cmnt(masm, "[ PrepareForTailCall");
2451
2452  // Prepare for tail call only if ES2015 tail call elimination is active.
2453  Label done;
2454  ExternalReference is_tail_call_elimination_enabled =
2455      ExternalReference::is_tail_call_elimination_enabled_address(
2456          masm->isolate());
2457  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
2458  __ LoadlB(scratch1, MemOperand(scratch1));
2459  __ CmpP(scratch1, Operand::Zero());
2460  __ beq(&done);
2461
2462  // Drop possible interpreter handler/stub frame.
2463  {
2464    Label no_interpreter_frame;
2465    __ LoadP(scratch3,
2466             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2467    __ CmpP(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
2468    __ bne(&no_interpreter_frame);
2469    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2470    __ bind(&no_interpreter_frame);
2471  }
2472
2473  // Check if next frame is an arguments adaptor frame.
2474  Register caller_args_count_reg = scratch1;
2475  Label no_arguments_adaptor, formal_parameter_count_loaded;
2476  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2477  __ LoadP(
2478      scratch3,
2479      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2480  __ CmpP(scratch3,
2481          Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2482  __ bne(&no_arguments_adaptor);
2483
2484  // Drop current frame and load arguments count from arguments adaptor frame.
2485  __ LoadRR(fp, scratch2);
2486  __ LoadP(caller_args_count_reg,
2487           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2488  __ SmiUntag(caller_args_count_reg);
2489  __ b(&formal_parameter_count_loaded);
2490
2491  __ bind(&no_arguments_adaptor);
2492  // Load caller's formal parameter count
2493  __ LoadP(scratch1,
2494           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2495  __ LoadP(scratch1,
2496           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2497  __ LoadW(caller_args_count_reg,
2498           FieldMemOperand(scratch1,
2499                           SharedFunctionInfo::kFormalParameterCountOffset));
2500#if !V8_TARGET_ARCH_S390X
2501  __ SmiUntag(caller_args_count_reg);
2502#endif
2503
2504  __ bind(&formal_parameter_count_loaded);
2505
2506  ParameterCount callee_args_count(args_reg);
2507  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2508                        scratch3);
2509  __ bind(&done);
2510}
2511}  // namespace
2512
2513// static
2514void Builtins::Generate_CallFunction(MacroAssembler* masm,
2515                                     ConvertReceiverMode mode,
2516                                     TailCallMode tail_call_mode) {
2517  // ----------- S t a t e -------------
2518  //  -- r2 : the number of arguments (not including the receiver)
2519  //  -- r3 : the function to call (checked to be a JSFunction)
2520  // -----------------------------------
2521  __ AssertFunction(r3);
2522
2523  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2524  // Check that the function is not a "classConstructor".
2525  Label class_constructor;
2526  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2527  __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
2528  __ TestBitMask(r5, FunctionKind::kClassConstructor
2529                         << SharedFunctionInfo::kFunctionKindShift,
2530                 r0);
2531  __ bne(&class_constructor);
2532
2533  // Enter the context of the function; ToObject has to run in the function
2534  // context, and we also need to take the global proxy from the function
2535  // context in case of conversion.
2536  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
2537  // We need to convert the receiver for non-native sloppy mode functions.
2538  Label done_convert;
2539  __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
2540                          (1 << SharedFunctionInfo::kNativeBit)));
2541  __ bne(&done_convert);
2542  {
2543    // ----------- S t a t e -------------
2544    //  -- r2 : the number of arguments (not including the receiver)
2545    //  -- r3 : the function to call (checked to be a JSFunction)
2546    //  -- r4 : the shared function info.
2547    //  -- cp : the function context.
2548    // -----------------------------------
2549
2550    if (mode == ConvertReceiverMode::kNullOrUndefined) {
2551      // Patch receiver to global proxy.
2552      __ LoadGlobalProxy(r5);
2553    } else {
2554      Label convert_to_object, convert_receiver;
2555      __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
2556      __ LoadP(r5, MemOperand(sp, r5));
2557      __ JumpIfSmi(r5, &convert_to_object);
2558      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2559      __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
2560      __ bge(&done_convert);
2561      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2562        Label convert_global_proxy;
2563        __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
2564                      &convert_global_proxy);
2565        __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
2566        __ bind(&convert_global_proxy);
2567        {
2568          // Patch receiver to global proxy.
2569          __ LoadGlobalProxy(r5);
2570        }
2571        __ b(&convert_receiver);
2572      }
2573      __ bind(&convert_to_object);
2574      {
2575        // Convert receiver using ToObject.
2576        // TODO(bmeurer): Inline the allocation here to avoid building the frame
2577        // in the fast case? (fall back to AllocateInNewSpace?)
2578        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2579        __ SmiTag(r2);
2580        __ Push(r2, r3);
2581        __ LoadRR(r2, r5);
2582        __ Push(cp);
2583        __ Call(masm->isolate()->builtins()->ToObject(),
2584                RelocInfo::CODE_TARGET);
2585        __ Pop(cp);
2586        __ LoadRR(r5, r2);
2587        __ Pop(r2, r3);
2588        __ SmiUntag(r2);
2589      }
2590      __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2591      __ bind(&convert_receiver);
2592    }
2593    __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
2594    __ StoreP(r5, MemOperand(sp, r6));
2595  }
2596  __ bind(&done_convert);
2597
2598  // ----------- S t a t e -------------
2599  //  -- r2 : the number of arguments (not including the receiver)
2600  //  -- r3 : the function to call (checked to be a JSFunction)
2601  //  -- r4 : the shared function info.
2602  //  -- cp : the function context.
2603  // -----------------------------------
2604
2605  if (tail_call_mode == TailCallMode::kAllow) {
2606    PrepareForTailCall(masm, r2, r5, r6, r7);
2607  }
2608
2609  __ LoadW(
2610      r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
2611#if !V8_TARGET_ARCH_S390X
2612  __ SmiUntag(r4);
2613#endif
2614  ParameterCount actual(r2);
2615  ParameterCount expected(r4);
2616  __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION,
2617                        CheckDebugStepCallWrapper());
2618
2619  // The function is a "classConstructor", need to raise an exception.
2620  __ bind(&class_constructor);
2621  {
2622    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
2623    __ push(r3);
2624    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2625  }
2626}
2627
2628namespace {
2629
2630void Generate_PushBoundArguments(MacroAssembler* masm) {
2631  // ----------- S t a t e -------------
2632  //  -- r2 : the number of arguments (not including the receiver)
2633  //  -- r3 : target (checked to be a JSBoundFunction)
2634  //  -- r5 : new.target (only in case of [[Construct]])
2635  // -----------------------------------
2636
2637  // Load [[BoundArguments]] into r4 and length of that into r6.
2638  Label no_bound_arguments;
2639  __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
2640  __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
2641  __ SmiUntag(r6);
2642  __ LoadAndTestP(r6, r6);
2643  __ beq(&no_bound_arguments);
2644  {
2645    // ----------- S t a t e -------------
2646    //  -- r2 : the number of arguments (not including the receiver)
2647    //  -- r3 : target (checked to be a JSBoundFunction)
2648    //  -- r4 : the [[BoundArguments]] (implemented as FixedArray)
2649    //  -- r5 : new.target (only in case of [[Construct]])
2650    //  -- r6 : the number of [[BoundArguments]]
2651    // -----------------------------------
2652
2653    // Reserve stack space for the [[BoundArguments]].
2654    {
2655      Label done;
2656      __ LoadRR(r8, sp);  // preserve previous stack pointer
2657      __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
2658      __ SubP(sp, sp, r9);
2659      // Check the stack for overflow. We are not trying to catch interruptions
2660      // (i.e. debug break and preemption) here, so check the "real stack
2661      // limit".
2662      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
2663      __ bgt(&done);  // Signed comparison.
2664      // Restore the stack pointer.
2665      __ LoadRR(sp, r8);
2666      {
2667        FrameScope scope(masm, StackFrame::MANUAL);
2668        __ EnterFrame(StackFrame::INTERNAL);
2669        __ CallRuntime(Runtime::kThrowStackOverflow);
2670      }
2671      __ bind(&done);
2672    }
2673
2674    // Relocate arguments down the stack.
2675    //  -- r2 : the number of arguments (not including the receiver)
2676    //  -- r8 : the previous stack pointer
2677    //  -- r9: the size of the [[BoundArguments]]
2678    {
2679      Label skip, loop;
2680      __ LoadImmP(r7, Operand::Zero());
2681      __ CmpP(r2, Operand::Zero());
2682      __ beq(&skip);
2683      __ LoadRR(r1, r2);
2684      __ bind(&loop);
2685      __ LoadP(r0, MemOperand(r8, r7));
2686      __ StoreP(r0, MemOperand(sp, r7));
2687      __ AddP(r7, r7, Operand(kPointerSize));
2688      __ BranchOnCount(r1, &loop);
2689      __ bind(&skip);
2690    }
2691
2692    // Copy [[BoundArguments]] to the stack (below the arguments).
2693    {
2694      Label loop;
2695      __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2696      __ AddP(r4, r4, r9);
2697      __ LoadRR(r1, r6);
2698      __ bind(&loop);
2699      __ LoadP(r0, MemOperand(r4, -kPointerSize));
2700      __ lay(r4, MemOperand(r4, -kPointerSize));
2701      __ StoreP(r0, MemOperand(sp, r7));
2702      __ AddP(r7, r7, Operand(kPointerSize));
2703      __ BranchOnCount(r1, &loop);
2704      __ AddP(r2, r2, r6);
2705    }
2706  }
2707  __ bind(&no_bound_arguments);
2708}
2709
2710}  // namespace
2711
2712// static
2713void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2714                                              TailCallMode tail_call_mode) {
2715  // ----------- S t a t e -------------
2716  //  -- r2 : the number of arguments (not including the receiver)
2717  //  -- r3 : the function to call (checked to be a JSBoundFunction)
2718  // -----------------------------------
2719  __ AssertBoundFunction(r3);
2720
2721  if (tail_call_mode == TailCallMode::kAllow) {
2722    PrepareForTailCall(masm, r2, r5, r6, r7);
2723  }
2724
2725  // Patch the receiver to [[BoundThis]].
2726  __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
2727  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
2728  __ StoreP(ip, MemOperand(sp, r1));
2729
2730  // Push the [[BoundArguments]] onto the stack.
2731  Generate_PushBoundArguments(masm);
2732
2733  // Call the [[BoundTargetFunction]] via the Call builtin.
2734  __ LoadP(r3,
2735           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2736  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2737                                       masm->isolate())));
2738  __ LoadP(ip, MemOperand(ip));
2739  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2740  __ JumpToJSEntry(ip);
2741}
2742
2743// static
2744void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2745                             TailCallMode tail_call_mode) {
2746  // ----------- S t a t e -------------
2747  //  -- r2 : the number of arguments (not including the receiver)
2748  //  -- r3 : the target to call (can be any Object).
2749  // -----------------------------------
2750
2751  Label non_callable, non_function, non_smi;
2752  __ JumpIfSmi(r3, &non_callable);
2753  __ bind(&non_smi);
2754  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
2755  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2756          RelocInfo::CODE_TARGET, eq);
2757  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2758  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2759          RelocInfo::CODE_TARGET, eq);
2760
2761  // Check if target has a [[Call]] internal method.
2762  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2763  __ TestBit(r6, Map::kIsCallable);
2764  __ beq(&non_callable);
2765
2766  __ CmpP(r7, Operand(JS_PROXY_TYPE));
2767  __ bne(&non_function);
2768
2769  // 0. Prepare for tail call if necessary.
2770  if (tail_call_mode == TailCallMode::kAllow) {
2771    PrepareForTailCall(masm, r2, r5, r6, r7);
2772  }
2773
2774  // 1. Runtime fallback for Proxy [[Call]].
2775  __ Push(r3);
2776  // Increase the arguments size to include the pushed function and the
2777  // existing receiver on the stack.
2778  __ AddP(r2, r2, Operand(2));
2779  // Tail-call to the runtime.
2780  __ JumpToExternalReference(
2781      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2782
2783  // 2. Call to something else, which might have a [[Call]] internal method (if
2784  // not we raise an exception).
2785  __ bind(&non_function);
2786  // Overwrite the original receiver the (original) target.
2787  __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
2788  __ StoreP(r3, MemOperand(sp, r7));
2789  // Let the "call_as_function_delegate" take care of the rest.
2790  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
2791  __ Jump(masm->isolate()->builtins()->CallFunction(
2792              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2793          RelocInfo::CODE_TARGET);
2794
2795  // 3. Call to something that is not callable.
2796  __ bind(&non_callable);
2797  {
2798    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2799    __ Push(r3);
2800    __ CallRuntime(Runtime::kThrowCalledNonCallable);
2801  }
2802}
2803
2804static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
2805  Register argc = r2;
2806  Register constructor = r3;
2807  Register new_target = r5;
2808
2809  Register scratch = r4;
2810  Register scratch2 = r8;
2811
2812  Register spread = r6;
2813  Register spread_map = r7;
2814  Register spread_len = r7;
2815  Label runtime_call, push_args;
2816  __ LoadP(spread, MemOperand(sp, 0));
2817  __ JumpIfSmi(spread, &runtime_call);
2818  __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2819
2820  // Check that the spread is an array.
2821  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
2822  __ bne(&runtime_call);
2823
2824  // Check that we have the original ArrayPrototype.
2825  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2826  __ LoadP(scratch2, NativeContextMemOperand());
2827  __ LoadP(scratch2,
2828           ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2829  __ CmpP(scratch, scratch2);
2830  __ bne(&runtime_call);
2831
2832  // Check that the ArrayPrototype hasn't been modified in a way that would
2833  // affect iteration.
2834  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2835  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2836  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
2837  __ bne(&runtime_call);
2838
2839  // Check that the map of the initial array iterator hasn't changed.
2840  __ LoadP(scratch2, NativeContextMemOperand());
2841  __ LoadP(scratch,
2842           ContextMemOperand(scratch2,
2843                             Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2844  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2845  __ LoadP(scratch2,
2846           ContextMemOperand(
2847               scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2848  __ CmpP(scratch, scratch2);
2849  __ bne(&runtime_call);
2850
2851  // For FastPacked kinds, iteration will have the same effect as simply
2852  // accessing each property in order.
2853  Label no_protector_check;
2854  __ LoadlB(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2855  __ DecodeField<Map::ElementsKindBits>(scratch);
2856  __ CmpP(scratch, Operand(FAST_HOLEY_ELEMENTS));
2857  __ bgt(&runtime_call);
2858  // For non-FastHoley kinds, we can skip the protector check.
2859  __ CmpP(scratch, Operand(FAST_SMI_ELEMENTS));
2860  __ beq(&no_protector_check);
2861  __ CmpP(scratch, Operand(FAST_ELEMENTS));
2862  __ beq(&no_protector_check);
2863  // Check the ArrayProtector cell.
2864  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2865  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2866  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
2867  __ bne(&runtime_call);
2868
2869  __ bind(&no_protector_check);
2870  // Load the FixedArray backing store, but use the length from the array.
2871  __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
2872  __ SmiUntag(spread_len);
2873  __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2874  __ b(&push_args);
2875
2876  __ bind(&runtime_call);
2877  {
2878    // Call the builtin for the result of the spread.
2879    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2880    __ SmiTag(argc);
2881    __ Push(constructor, new_target, argc, spread);
2882    __ CallRuntime(Runtime::kSpreadIterableFixed);
2883    __ LoadRR(spread, r2);
2884    __ Pop(constructor, new_target, argc);
2885    __ SmiUntag(argc);
2886  }
2887
2888  {
2889    // Calculate the new nargs including the result of the spread.
2890    __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
2891    __ SmiUntag(spread_len);
2892
2893    __ bind(&push_args);
2894    // argc += spread_len - 1. Subtract 1 for the spread itself.
2895    __ AddP(argc, argc, spread_len);
2896    __ SubP(argc, argc, Operand(1));
2897
2898    // Pop the spread argument off the stack.
2899    __ Pop(scratch);
2900  }
2901
2902  // Check for stack overflow.
2903  {
2904    // Check the stack for overflow. We are not trying to catch interruptions
2905    // (i.e. debug break and preemption) here, so check the "real stack limit".
2906    Label done;
2907    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2908    // Make scratch the space we have left. The stack might already be
2909    // overflowed here which will cause scratch to become negative.
2910    __ SubP(scratch, sp, scratch);
2911    // Check if the arguments will overflow the stack.
2912    __ ShiftLeftP(r0, spread_len, Operand(kPointerSizeLog2));
2913    __ CmpP(scratch, r0);
2914    __ bgt(&done);  // Signed comparison.
2915    __ TailCallRuntime(Runtime::kThrowStackOverflow);
2916    __ bind(&done);
2917  }
2918
2919  // Put the evaluated spread onto the stack as additional arguments.
2920  {
2921    __ LoadImmP(scratch, Operand::Zero());
2922    Label done, push, loop;
2923    __ bind(&loop);
2924    __ CmpP(scratch, spread_len);
2925    __ beq(&done);
2926    __ ShiftLeftP(r0, scratch, Operand(kPointerSizeLog2));
2927    __ AddP(scratch2, spread, r0);
2928    __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2929    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
2930    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
2931    __ bind(&push);
2932    __ Push(scratch2);
2933    __ AddP(scratch, scratch, Operand(1));
2934    __ b(&loop);
2935    __ bind(&done);
2936  }
2937}
2938
2939// static
2940void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
2941  // ----------- S t a t e -------------
2942  //  -- r2 : the number of arguments (not including the receiver)
2943  //  -- r3 : the constructor to call (can be any Object)
2944  // -----------------------------------
2945
2946  // CheckSpreadAndPushToStack will push r5 to save it.
2947  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2948  CheckSpreadAndPushToStack(masm);
2949  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2950                                            TailCallMode::kDisallow),
2951          RelocInfo::CODE_TARGET);
2952}
2953
2954// static
2955void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2956  // ----------- S t a t e -------------
2957  //  -- r2 : the number of arguments (not including the receiver)
2958  //  -- r3 : the constructor to call (checked to be a JSFunction)
2959  //  -- r5 : the new target (checked to be a constructor)
2960  // -----------------------------------
2961  __ AssertFunction(r3);
2962
2963  // Calling convention for function specific ConstructStubs require
2964  // r4 to contain either an AllocationSite or undefined.
2965  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2966
2967  // Tail call to the function-specific construct stub (still in the caller
2968  // context at this point).
2969  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2970  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
2971  __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
2972  __ JumpToJSEntry(ip);
2973}
2974
2975// static
2976void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2977  // ----------- S t a t e -------------
2978  //  -- r2 : the number of arguments (not including the receiver)
2979  //  -- r3 : the function to call (checked to be a JSBoundFunction)
2980  //  -- r5 : the new target (checked to be a constructor)
2981  // -----------------------------------
2982  __ AssertBoundFunction(r3);
2983
2984  // Push the [[BoundArguments]] onto the stack.
2985  Generate_PushBoundArguments(masm);
2986
2987  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2988  Label skip;
2989  __ CmpP(r3, r5);
2990  __ bne(&skip);
2991  __ LoadP(r5,
2992           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2993  __ bind(&skip);
2994
2995  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2996  __ LoadP(r3,
2997           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2998  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2999  __ LoadP(ip, MemOperand(ip));
3000  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
3001  __ JumpToJSEntry(ip);
3002}
3003
3004// static
3005void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
3006  // ----------- S t a t e -------------
3007  //  -- r2 : the number of arguments (not including the receiver)
3008  //  -- r3 : the constructor to call (checked to be a JSProxy)
3009  //  -- r5 : the new target (either the same as the constructor or
3010  //          the JSFunction on which new was invoked initially)
3011  // -----------------------------------
3012
3013  // Call into the Runtime for Proxy [[Construct]].
3014  __ Push(r3, r5);
3015  // Include the pushed new_target, constructor and the receiver.
3016  __ AddP(r2, r2, Operand(3));
3017  // Tail-call to the runtime.
3018  __ JumpToExternalReference(
3019      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
3020}
3021
3022// static
3023void Builtins::Generate_Construct(MacroAssembler* masm) {
3024  // ----------- S t a t e -------------
3025  //  -- r2 : the number of arguments (not including the receiver)
3026  //  -- r3 : the constructor to call (can be any Object)
3027  //  -- r5 : the new target (either the same as the constructor or
3028  //          the JSFunction on which new was invoked initially)
3029  // -----------------------------------
3030
3031  // Check if target is a Smi.
3032  Label non_constructor;
3033  __ JumpIfSmi(r3, &non_constructor);
3034
3035  // Dispatch based on instance type.
3036  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
3037  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
3038          RelocInfo::CODE_TARGET, eq);
3039
3040  // Check if target has a [[Construct]] internal method.
3041  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
3042  __ TestBit(r4, Map::kIsConstructor);
3043  __ beq(&non_constructor);
3044
3045  // Only dispatch to bound functions after checking whether they are
3046  // constructors.
3047  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
3048  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
3049          RelocInfo::CODE_TARGET, eq);
3050
3051  // Only dispatch to proxies after checking whether they are constructors.
3052  __ CmpP(r7, Operand(JS_PROXY_TYPE));
3053  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
3054          eq);
3055
3056  // Called Construct on an exotic Object with a [[Construct]] internal method.
3057  {
3058    // Overwrite the original receiver with the (original) target.
3059    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
3060    __ StoreP(r3, MemOperand(sp, r7));
3061    // Let the "call_as_constructor_delegate" take care of the rest.
3062    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
3063    __ Jump(masm->isolate()->builtins()->CallFunction(),
3064            RelocInfo::CODE_TARGET);
3065  }
3066
3067  // Called Construct on an Object that doesn't have a [[Construct]] internal
3068  // method.
3069  __ bind(&non_constructor);
3070  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
3071          RelocInfo::CODE_TARGET);
3072}
3073
3074void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
3075  // ----------- S t a t e -------------
3076  //  -- r2 : the number of arguments (not including the receiver)
3077  //  -- r3 : the constructor to call (can be any Object)
3078  //  -- r5 : the new target (either the same as the constructor or
3079  //          the JSFunction on which new was invoked initially)
3080  // -----------------------------------
3081
3082  CheckSpreadAndPushToStack(masm);
3083  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
3084}
3085
3086// static
3087void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
3088  // ----------- S t a t e -------------
3089  //  -- r3 : requested object size (untagged)
3090  //  -- lr : return address
3091  // -----------------------------------
3092  __ SmiTag(r3);
3093  __ Push(r3);
3094  __ LoadSmiLiteral(cp, Smi::kZero);
3095  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
3096}
3097
3098// static
3099void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
3100  // ----------- S t a t e -------------
3101  //  -- r3 : requested object size (untagged)
3102  //  -- lr : return address
3103  // -----------------------------------
3104  __ SmiTag(r3);
3105  __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
3106  __ Push(r3, r4);
3107  __ LoadSmiLiteral(cp, Smi::kZero);
3108  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
3109}
3110
3111// static
3112void Builtins::Generate_Abort(MacroAssembler* masm) {
3113  // ----------- S t a t e -------------
3114  //  -- r3 : message_id as Smi
3115  //  -- lr : return address
3116  // -----------------------------------
3117  __ push(r3);
3118  __ LoadSmiLiteral(cp, Smi::kZero);
3119  __ TailCallRuntime(Runtime::kAbort);
3120}
3121
3122void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
3123  // ----------- S t a t e -------------
3124  //  -- r2 : actual number of arguments
3125  //  -- r3 : function (passed through to callee)
3126  //  -- r4 : expected number of arguments
3127  //  -- r5 : new target (passed through to callee)
3128  // -----------------------------------
3129
3130  Label invoke, dont_adapt_arguments, stack_overflow;
3131
3132  Label enough, too_few;
3133  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
3134  __ CmpP(r2, r4);
3135  __ blt(&too_few);
3136  __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
3137  __ beq(&dont_adapt_arguments);
3138
3139  {  // Enough parameters: actual >= expected
3140    __ bind(&enough);
3141    EnterArgumentsAdaptorFrame(masm);
3142    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
3143
3144    // Calculate copy start address into r2 and copy end address into r6.
3145    // r2: actual number of arguments as a smi
3146    // r3: function
3147    // r4: expected number of arguments
3148    // r5: new target (passed through to callee)
3149    // ip: code entry to call
3150    __ SmiToPtrArrayOffset(r2, r2);
3151    __ AddP(r2, fp);
3152    // adjust for return address and receiver
3153    __ AddP(r2, r2, Operand(2 * kPointerSize));
3154    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
3155    __ SubP(r6, r2, r6);
3156
3157    // Copy the arguments (including the receiver) to the new stack frame.
3158    // r2: copy start address
3159    // r3: function
3160    // r4: expected number of arguments
3161    // r5: new target (passed through to callee)
3162    // r6: copy end address
3163    // ip: code entry to call
3164
3165    Label copy;
3166    __ bind(&copy);
3167    __ LoadP(r0, MemOperand(r2, 0));
3168    __ push(r0);
3169    __ CmpP(r2, r6);  // Compare before moving to next argument.
3170    __ lay(r2, MemOperand(r2, -kPointerSize));
3171    __ bne(&copy);
3172
3173    __ b(&invoke);
3174  }
3175
3176  {  // Too few parameters: Actual < expected
3177    __ bind(&too_few);
3178
3179    EnterArgumentsAdaptorFrame(masm);
3180    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
3181
3182    // Calculate copy start address into r0 and copy end address is fp.
3183    // r2: actual number of arguments as a smi
3184    // r3: function
3185    // r4: expected number of arguments
3186    // r5: new target (passed through to callee)
3187    // ip: code entry to call
3188    __ SmiToPtrArrayOffset(r2, r2);
3189    __ lay(r2, MemOperand(r2, fp));
3190
3191    // Copy the arguments (including the receiver) to the new stack frame.
3192    // r2: copy start address
3193    // r3: function
3194    // r4: expected number of arguments
3195    // r5: new target (passed through to callee)
3196    // ip: code entry to call
3197    Label copy;
3198    __ bind(&copy);
3199    // Adjust load for return address and receiver.
3200    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
3201    __ push(r0);
3202    __ CmpP(r2, fp);  // Compare before moving to next argument.
3203    __ lay(r2, MemOperand(r2, -kPointerSize));
3204    __ bne(&copy);
3205
3206    // Fill the remaining expected arguments with undefined.
3207    // r3: function
3208    // r4: expected number of argumentus
3209    // ip: code entry to call
3210    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3211    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
3212    __ SubP(r6, fp, r6);
3213    // Adjust for frame.
3214    __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
3215                            2 * kPointerSize));
3216
3217    Label fill;
3218    __ bind(&fill);
3219    __ push(r0);
3220    __ CmpP(sp, r6);
3221    __ bne(&fill);
3222  }
3223
3224  // Call the entry point.
3225  __ bind(&invoke);
3226  __ LoadRR(r2, r4);
3227  // r2 : expected number of arguments
3228  // r3 : function (passed through to callee)
3229  // r5 : new target (passed through to callee)
3230  __ CallJSEntry(ip);
3231
3232  // Store offset of return address for deoptimizer.
3233  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
3234
3235  // Exit frame and return.
3236  LeaveArgumentsAdaptorFrame(masm);
3237  __ Ret();
3238
3239  // -------------------------------------------
3240  // Dont adapt arguments.
3241  // -------------------------------------------
3242  __ bind(&dont_adapt_arguments);
3243  __ JumpToJSEntry(ip);
3244
3245  __ bind(&stack_overflow);
3246  {
3247    FrameScope frame(masm, StackFrame::MANUAL);
3248    __ CallRuntime(Runtime::kThrowStackOverflow);
3249    __ bkpt(0);
3250  }
3251}
3252
3253#undef __
3254
3255}  // namespace internal
3256}  // namespace v8
3257
3258#endif  // V8_TARGET_ARCH_S390
3259