1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_ARM64
6
7#include "src/arm64/frames-arm64.h"
8#include "src/codegen.h"
9#include "src/debug/debug.h"
10#include "src/deoptimizer.h"
11#include "src/full-codegen/full-codegen.h"
12#include "src/runtime/runtime.h"
13
14namespace v8 {
15namespace internal {
16
17#define __ ACCESS_MASM(masm)
18
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
24
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
31
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // Trampoline from a JS calling convention into a C++ builtin at |address|.
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : target
  //  -- x3                 : new target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // (stack slots are kPointerSize == 8 bytes on arm64)
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Add(x0, x0, num_extra_args + 1);

  // Insert extra arguments: smi-tagged argc, target and new target. The argc
  // register itself must stay untagged for JumpToExternalReference.
  __ SmiTag(x0);
  __ Push(x0, x1, x3);
  __ SmiUntag(x0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
64
65void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
66  // ----------- S t a t e -------------
67  //  -- x0     : number of arguments
68  //  -- lr     : return address
69  //  -- sp[...]: constructor arguments
70  // -----------------------------------
71  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
72  Label generic_array_code;
73
74  // Get the InternalArray function.
75  GenerateLoadInternalArrayFunction(masm, x1);
76
77  if (FLAG_debug_code) {
78    // Initial map for the builtin InternalArray functions should be maps.
79    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
80    __ Tst(x10, kSmiTagMask);
81    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
82    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
83    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
84  }
85
86  // Run the native code for the InternalArray function called as a normal
87  // function.
88  InternalArrayConstructorStub stub(masm->isolate());
89  __ TailCallStub(&stub);
90}
91
92void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
93  // ----------- S t a t e -------------
94  //  -- x0     : number of arguments
95  //  -- lr     : return address
96  //  -- sp[...]: constructor arguments
97  // -----------------------------------
98  ASM_LOCATION("Builtins::Generate_ArrayCode");
99  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
100
101  // Get the Array function.
102  GenerateLoadArrayFunction(masm, x1);
103
104  if (FLAG_debug_code) {
105    // Initial map for the builtin Array functions should be maps.
106    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
107    __ Tst(x10, kSmiTagMask);
108    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
109    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
110    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
111  }
112
113  // Run the native code for the Array function called as a normal function.
114  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
115  __ Mov(x3, x1);
116  ArrayConstructorStub stub(masm->isolate());
117  __ TailCallStub(&stub);
118}
119
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_MathMaxMin");

  // The identity element: Math.min folds from +Infinity, Math.max from
  // -Infinity, so any argument can replace the initial accumulator.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in x5 and the double value in d5.
  __ LoadRoot(x5, root_index);
  __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));

  // Loop over the arguments from last (x4 = argc - 1) down to first.
  Label done_loop, loop;
  __ mov(x4, x0);
  __ Bind(&loop);
  {
    // Check if all parameters done.
    __ Subs(x4, x4, 1);
    __ B(lt, &done_loop);

    // Load the next parameter tagged value into x2.
    __ Peek(x2, Operand(x4, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert_smi, convert_number, done_convert;
    __ JumpIfSmi(x2, &convert_smi);
    __ JumpIfHeapNumber(x2, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      // Save the caller-visible state (argc, loop index, accumulator) around
      // the call: argc and index are smi-tagged, x5/x4 go on the stack.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x0);
      __ SmiTag(x4);
      __ EnterBuiltinFrame(cp, x1, x0);
      __ Push(x5, x4);
      __ Mov(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Mov(x2, x0);
      __ Pop(x4, x5);
      __ LeaveBuiltinFrame(cp, x1, x0);
      __ SmiUntag(x4);
      __ SmiUntag(x0);
      {
        // Restore the double accumulator value (d5); ToNumber may have
        // triggered GC, so x5 must be re-read (smi or heap number).
        Label done_restore;
        __ SmiUntagToDouble(d5, x5, kSpeculativeUntag);
        __ JumpIfSmi(x5, &done_restore);
        __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset));
        __ Bind(&done_restore);
      }
    }
    __ AssertNumber(x2);
    __ JumpIfSmi(x2, &convert_smi);

    __ Bind(&convert_number);
    __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset));
    __ B(&done_convert);

    __ Bind(&convert_smi);
    __ SmiUntagToDouble(d2, x2);
    __ Bind(&done_convert);

    // We can use a single fmin/fmax for the operation itself, but we then need
    // to work out which HeapNumber (or smi) the result came from.
    // Compare the raw bit patterns before/after (x11 vs x10): if unchanged,
    // the accumulator won and x5 is kept, otherwise take the new value x2.
    __ Fmov(x11, d5);
    if (kind == MathMaxMinKind::kMin) {
      __ Fmin(d5, d5, d2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ Fmax(d5, d5, d2);
    }
    __ Fmov(x10, d5);
    __ Cmp(x10, x11);
    __ Csel(x5, x5, x2, eq);
    __ B(&loop);
  }

  __ Bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Add(x0, x0, 1);
  __ Drop(x0);
  __ Mov(x0, x5);
  __ Ret();
}
213
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // Number(value) called without `new`: converts the first argument to a
  // number (or returns +0 if there are no arguments).
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor");

  // 1. Load the first argument into x0.
  Label no_arguments;
  {
    // With argc == 0, x0 already holds the Smi +0 returned by 2b.
    __ Cbz(x0, &no_arguments);
    __ Mov(x2, x0);  // Store argc in x2.
    __ Sub(x0, x0, 1);
    __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  }

  // 2a. Convert first argument to number.
  {
    // argc (x2) survives the call smi-tagged inside the builtin frame.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x2);
    __ EnterBuiltinFrame(cp, x1, x2);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, x1, x2);
    __ SmiUntag(x2);
  }

  {
    // Drop all arguments.
    __ Drop(x2);
  }

  // 2b. No arguments, return +0 (already in x0).
  // Also the fall-through from 2a: drop the receiver and return.
  __ Bind(&no_arguments);
  __ Drop(1);
  __ Ret();
}
255
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // new Number(value): wraps the (converted) number in a JSValue object.
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 (defaults to Smi zero).
  {
    Label no_arguments, done;
    __ Move(x6, x0);  // Store argc in x6.
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
    __ B(&done);
    __ Bind(&no_arguments);
    __ Mov(x2, Smi::kZero);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a number (smi or heap number); otherwise call
  // ToNumber, preserving new target (x3) and argc (x6) across the call.
  {
    Label done_convert;
    __ JumpIfSmi(x2, &done_convert);
    __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x6);
      __ EnterBuiltinFrame(cp, x1, x6);
      __ Push(x3);
      __ Move(x0, x2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(x2, x0);
      __ Pop(x3);
      __ LeaveBuiltinFrame(cp, x1, x6);
      __ SmiUntag(x6);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (subclass construction).
  Label drop_frame_and_ret, new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ B(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x6);
    __ EnterBuiltinFrame(cp, x1, x6);
    __ Push(x2);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(x2);
    __ LeaveBuiltinFrame(cp, x1, x6);
    __ SmiUntag(x6);
  }
  // Store the wrapped number into the freshly created JSValue.
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    // Drop the arguments (x6 = argc) and the receiver, then return.
    __ Drop(x6);
    __ Drop(1);
    __ Ret();
  }
}
336
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // String(value) called without `new`: returns the argument if it is already
  // a string, a descriptive string for symbols, otherwise ToString(value).
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0.
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Mov(x2, x0);  // Store argc in x2.
    __ Sub(x0, x0, 1);
    __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    // String types sort below FIRST_NONSTRING_TYPE; equal means symbol.
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x3, x3, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string, preserving argc (x2) smi-tagged.
  __ Bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x2);
    __ EnterBuiltinFrame(cp, x1, x2);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, x1, x2);
    __ SmiUntag(x2);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in x0 to a string via the runtime (drops the
  // arguments and receiver first, then pushes the symbol as the argument).
  __ Bind(&symbol_descriptive_string);
  {
    __ Drop(x2);
    __ Drop(1);
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop the arguments (x2 = argc) and the receiver, then return.
    __ Drop(x2);
    __ Drop(1);
    __ Ret();
  }
}
406
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // new String(value): wraps the (converted) string in a JSValue object.
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- x3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Make sure we operate in the context of the called function.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));

  // 2. Load the first argument into x2 (defaults to the empty string).
  {
    Label no_arguments, done;
    __ mov(x6, x0);  // Store argc in x6.
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
    __ B(&done);
    __ Bind(&no_arguments);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Bind(&done);
  }

  // 3. Make sure x2 is a string; otherwise call ToString, preserving
  // new target (x3) and argc (x6) across the call.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(x6);
      __ EnterBuiltinFrame(cp, x1, x6);
      __ Push(x3);
      __ Move(x0, x2);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(x2, x0);
      __ Pop(x3);
      __ LeaveBuiltinFrame(cp, x1, x6);
      __ SmiUntag(x6);
    }
    __ Bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (subclass construction).
  Label drop_frame_and_ret, new_object;
  __ Cmp(x1, x3);
  __ B(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
  __ B(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(x6);
    __ EnterBuiltinFrame(cp, x1, x6);
    __ Push(x2);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(x2);
    __ LeaveBuiltinFrame(cp, x1, x6);
    __ SmiUntag(x6);
  }
  // Store the wrapped string into the freshly created JSValue.
  __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    // Drop the arguments (x6 = argc) and the receiver, then return.
    __ Drop(x6);
    __ Drop(1);
    __ Ret();
  }
}
488
// Tail-calls the code object attached to x1's SharedFunctionInfo.
// Expects the target JSFunction in x1; clobbers x2.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to get the entry address.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
495
// Calls the given runtime function (which returns a Code object) and then
// tail-calls into that code, restoring x0/x1/x3 for the callee.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target.
    // Push another copy as a parameter to the runtime call.
    __ SmiTag(x0);
    __ Push(x0, x1, x3, x1);

    // The runtime call consumes the extra x1 copy (1 argument) and returns
    // the code object in x0; stash it in x2 before restoring the registers.
    __ CallRuntime(function_id, 1);
    __ Move(x2, x0);

    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }

  // Jump past the Code object header to the entry address.
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}
521
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would be
  // quite expensive. A good compromise is to first check against stack limit as
  // a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  // Stack limit hit: try to install optimized code now.
  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  // Otherwise keep running the unoptimized shared code.
  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
537
538namespace {
539
// Shared body for the four JSConstructStub variants.
//  - is_api_function: the constructor is an API function (affects deopt
//    bookkeeping only in this helper).
//  - create_implicit_receiver: allocate a receiver object before the call
//    (ordinary constructors); otherwise the hole is pushed instead.
//  - check_derived_construct: throw if a derived constructor returns a
//    non-object (ES6 9.2.2 step 13+).
void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  Label post_instantiation_deopt_entry;

  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x3     : new target
  //  -- lr     : return address
  //  -- cp     : context pointer
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Symbolic names for the incoming parameter registers.
    Register argc = x0;
    Register constructor = x1;
    Register new_target = x3;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(argc);
    __ Push(cp, argc);

    if (create_implicit_receiver) {
      // Allocate the new receiver object; FastNewObject clobbers argc, which
      // is why it is reloaded from the frame below.
      __ Push(constructor, new_target);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ Mov(x4, x0);
      __ Pop(new_target, constructor);

      // ----------- S t a t e -------------
      //  -- x1: constructor function
      //  -- x3: new target
      //  -- x4: newly allocated object
      // -----------------------------------

      // Reload the number of arguments from the stack.
      // Set it up in x0 for the function call below.
      // jssp[0]: number of arguments (smi-tagged)
      __ Peek(argc, 0);  // Load number of arguments.
    }

    __ SmiUntag(argc);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(x4, x4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Deoptimizer re-enters stub code here.
    __ Bind(&post_instantiation_deopt_entry);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // x3: new target
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    // x3: new target
    ParameterCount actual(argc);
    __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
          masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(x0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ Bind(&use_receiver);
      __ Peek(x0, 0);

      // Remove the receiver from the stack, remove caller arguments, and
      // return.
      __ Bind(&exit);
      // x0: result
      // jssp[0]: receiver (newly allocated object)
      // jssp[1]: number of arguments (smi-tagged)
      __ Peek(x1, 1 * kXRegSize);
    } else {
      __ Peek(x1, 0);
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(x0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ Bind(&dont_throw);
  }

  // Drop the caller arguments (smi-tagged count in x1) and the receiver.
  __ DropBySMI(x1);
  __ Drop(1);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  }
  __ Ret();

  // Store offset of trampoline address for deoptimizer. This is the bailout
  // point after the receiver instantiation but before the function invocation.
  // We need to restore some registers in order to continue the above code.
  if (create_implicit_receiver && !is_api_function) {
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());

    // ----------- S t a t e -------------
    //  -- x0    : newly allocated object
    //  -- sp[0] : constructor function
    // -----------------------------------

    __ Pop(x1);
    __ Push(x0, x0);

    // Retrieve smi-tagged arguments count from the stack.
    __ Ldr(x0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(x0);

    // Retrieve the new target value from the stack. This was placed into the
    // frame description in place of the receiver by the optimizing compiler.
    __ Add(x3, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    __ Ldr(x3, MemOperand(x3, x0, LSL, kPointerSizeLog2));

    // Continue with constructor function invocation.
    __ B(&post_instantiation_deopt_entry);
  }
}
736
737}  // namespace
738
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Ordinary constructor: allocates an implicit receiver; not an API
  // function; no derived-constructor return check.
  Generate_JSConstructStubHelper(masm, false, true, false);
}
742
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  // API function constructor: no implicit receiver allocation, no derived
  // return check.
  Generate_JSConstructStubHelper(masm, true, false, false);
}
746
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  // Builtin constructor: no implicit receiver, no derived return check.
  Generate_JSConstructStubHelper(masm, false, false, false);
}
750
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  // Derived-class constructor: no implicit receiver, but verify the
  // constructor's return value is an object (ES6 9.2.2 step 13+).
  Generate_JSConstructStubHelper(masm, false, false, true);
}
755
// Throws a TypeError when a non-constructable target (in x1) is used with
// `new`. Never returns.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(x1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
761
// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the value to pass to the generator
  //  -- x1 : the JSGeneratorObject to resume
  //  -- x2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(x1);

  // Store input value into generator object.
  __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  __ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Mov(x10, Operand(debug_hook));
  __ Ldrsb(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Mov(x10, Operand(debug_suspended_generator));
  __ Ldr(x10, MemOperand(x10));
  __ CompareAndBranch(x10, Operand(x1), eq,
                      &prepare_step_in_suspended_generator);
  __ Bind(&stepping_prepared);

  // Push receiver.
  __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
  __ Push(x5);

  // ----------- S t a t e -------------
  //  -- x1      : the JSGeneratorObject to resume
  //  -- x2      : the resume mode (tagged)
  //  -- x4      : generator function
  //  -- cp      : generator context
  //  -- lr      : return address
  //  -- jssp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w10,
         FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
  __ PushMultipleTimes(x11, w10);

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
    __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    // Set up the call with argc in w0 and the function in x1.
    __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(w0, FieldMemOperand(
                   x0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(x3, x1);
    __ Move(x1, x4);
    __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
    __ Jump(x5);
  }

  __ Bind(&prepare_step_in_if_stepping);
  {
    // Notify the debugger of the call, preserving the generator object and
    // resume mode; the function (x4) is reloaded since the call may move it.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2, x4);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);

  __ Bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1, x2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(x2, x1);
    __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
  }
  __ B(&stepping_prepared);
}
867
// Whether the argc register handed to Generate_CheckStackOverflow holds a
// smi-tagged or an untagged integer value.
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
869
// Clobbers x10, x15; preserves all other registers.
// NOTE(review): only x10 is visibly written in this function body — confirm
// whether x15 is actually clobbered (e.g. by the runtime-call path) before
// relying on it being preserved.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}
898
// Input:
//   x0: new.target.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
// Shared body of Generate_JSEntryTrampoline and
// Generate_JSConstructEntryTrampoline: enters an internal frame, copies the
// C-side argument vector (an array of handles) onto the JS stack, and
// invokes either the Call or the Construct builtin.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // Expects the argument count in argc (x3); clobbers x10 and x15 (see
    // Generate_CheckStackOverflow above).
    Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address: one past the last argument handle.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);                  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    // Swap x0 and x3 so that argc ends up in x0 and new.target in x3, as the
    // Call/Construct builtins expect.
    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}
987
988void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
989  Generate_JSEntryTrampolineHelper(masm, false);
990}
991
992void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
993  Generate_JSEntryTrampolineHelper(masm, true);
994}
995
// Tears down the interpreter frame and drops the receiver plus arguments
// from the caller's stack. Clobbers |scratch|.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  // NOTE(review): uses the raw assembler `ldr` where the rest of the file
  // uses the `Ldr` macro — presumably equivalent for this operand form, but
  // consider normalizing to `Ldr`.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ldr(args_count.W(),
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  // NOTE(review): unit size 1 implies kParameterSizeOffset holds a byte
  // count rather than a slot count — confirm against BytecodeArray layout.
  __ Drop(args_count, 1);
}
1011
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - x3: the new target
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  Label load_debug_bytecode_array, bytecode_array_loaded;
  DCHECK(!debug_info.is(x0));
  // A Smi DebugInfo slot means "no debug info"; otherwise use the debug copy.
  __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
  __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
  __ Bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kCodeOffset));
  __ Cmp(x0, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ B(ne, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ Ldr(x11, FieldMemOperand(x1, JSFunction::kFeedbackVectorOffset));
  __ Ldr(x11, FieldMemOperand(x11, Cell::kValueOffset));
  __ Ldr(x10, FieldMemOperand(
                  x11, FeedbackVector::kInvocationCountIndex * kPointerSize +
                           FeedbackVector::kHeaderSize));
  __ Add(x10, x10, Operand(Smi::FromInt(1)));
  __ Str(x10, FieldMemOperand(
                  x11, FeedbackVector::kInvocationCountIndex * kPointerSize +
                           FeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ Mov(x10, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ Strb(x10, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kBytecodeAgeOffset));

  // Load the initial bytecode offset.
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
  __ Push(x3, kInterpreterBytecodeArrayRegister, x0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16 bit aligned (and remove need for jssp).
    // Convert the frame size in bytes to a slot count.
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  // Scale the bytecode by the pointer size to index the dispatch table.
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Call(ip0);
  // Record the return PC so Generate_InterpreterEnterBytecode can recreate
  // the correct return address into this trampoline.
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in x0.
  LeaveInterpreterFrame(masm, x2);
  __ Ret();

  // Load debug copy of the bytecode array.
  __ Bind(&load_debug_bytecode_array);
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ B(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset));
  __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(x1, x7, x5);
  __ Jump(x7);
}
1151
1152static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
1153                                        Register scratch,
1154                                        Label* stack_overflow) {
1155  // Check the stack for overflow.
1156  // We are not trying to catch interruptions (e.g. debug break and
1157  // preemption) here, so the "real stack limit" is checked.
1158  Label enough_stack_space;
1159  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
1160  // Make scratch the space we have left. The stack might already be overflowed
1161  // here which will cause scratch to become negative.
1162  __ Sub(scratch, jssp, scratch);
1163  // Check if the arguments will overflow the stack.
1164  __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
1165  __ B(le, stack_overflow);
1166}
1167
// Pushes |num_args| values onto the JS stack, reading them from memory
// starting at |index| and moving towards lower addresses. Branches to
// |stack_overflow| if there is insufficient stack space. Clobbers |scratch|,
// |last_arg|, |stack_addr| and |index|.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register last_arg, Register stack_addr,
                                         Register scratch,
                                         Label* stack_overflow) {
  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  // last_arg = index - num_args * kPointerSize: the loop's stop address.
  __ Mov(scratch, num_args);
  __ lsl(scratch, scratch, kPointerSizeLog2);
  __ sub(last_arg, index, scratch);

  // Set stack pointer and where to stop.
  // stack_addr is the write cursor (old jssp); Claim reserves the space.
  __ Mov(stack_addr, jssp);
  __ Claim(scratch, 1);

  // Push the arguments.
  Label loop_header, loop_check;
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(index, last_arg);
  __ B(gt, &loop_header);
}
1195
// static
// Pushes the receiver and arguments supplied by the interpreter onto the JS
// stack and dispatches to the appropriate Call builtin for |mode|.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Add one for the receiver.
  __ add(x3, x0, Operand(1));

  // Push the arguments. x2, x4, x5, x6 will be modified.
  Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow);

  // Call the target.
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // The overflow runtime call does not return.
    __ Unreachable();
  }
}
1235
// static
// Pushes a receiver slot and the interpreter-supplied arguments, then
// dispatches to the appropriate Construct path for |mode|.
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  // -- x0 : argument count (not including receiver)
  // -- x3 : new target
  // -- x1 : constructor to call
  // -- x2 : allocation site feedback if available, undefined otherwise
  // -- x4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ Push(xzr);

  // Push the arguments. x5, x4, x6, x7 will be modified.
  Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(x2, x6);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ AssertFunction(x1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
    __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
    __ Br(x4);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with x0, x1, and x3 unmodified.
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with x0, x1, and x3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // The overflow runtime call does not return.
    __ Unreachable();
  }
}
1280
// static
// Pushes the interpreter-supplied arguments (plus the receiver slot) and
// tail-calls the ArrayConstructorStub.
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- x0 : argument count (not including receiver)
  // -- x1 : target to call verified to be Array function
  // -- x2 : allocation site feedback if available, undefined otherwise.
  // -- x3 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ add(x4, x0, Operand(1));  // Add one for the receiver.

  // Push the arguments. x3, x5, x6, x7 will be modified.
  Generate_InterpreterPushArgs(masm, x4, x3, x5, x6, x7, &stack_overflow);

  // Array constructor expects constructor in x3. It is same as call target.
  __ mov(x3, x1);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // The overflow runtime call does not return.
    __ Unreachable();
  }
}
1309
// Re-enters the interpreter at the bytecode offset stored in the current
// interpreter frame: rebuilds lr, the dispatch table register and the
// bytecode array/offset registers, then jumps to the target handler.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ Mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ Ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ Ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  // Scale the bytecode by the pointer size to index the dispatch table.
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  __ Jump(ip0);
}
1350
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ Ldr(x1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ldr(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the accumulator across the runtime call.
    __ Push(kInterpreterAccumulatorRegister, x1, x2);
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Mov(x2, x0);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  // Store the advanced offset back into the frame, then re-enter dispatch.
  __ Str(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);
}
1369
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Dispatch to the bytecode at the offset already recorded in the frame.
  Generate_InterpreterEnterBytecode(masm);
}
1373
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = x1;
  Register map = x13;  // Holds the optimized code map, despite the name.
  Register index = x2;

  // Do we have a valid feedback vector?
  __ Ldr(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ Ldr(index, FieldMemOperand(index, Cell::kValueOffset));
  __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);

  __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(map,
         FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset));
  __ Cmp(index, Operand(2));
  __ B(lt, &try_shared);

  // x4  : native context
  // x2  : length / index
  // x13 : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = x4;
  __ Ldr(native_context, NativeContextMemOperand());

  __ Bind(&loop_top);
  Register temp = x5;
  Register array_pointer = x6;

  // Does the native context match?
  __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2));
  __ Ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousContext));
  __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ Cmp(temp, native_context);
  __ B(ne, &loop_bottom);

  // Code available?
  Register entry = x7;
  __ Ldr(entry,
         FieldMemOperand(array_pointer,
                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
  // A cleared WeakCell holds a Smi; then there is no cached code.
  __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found code. Get it into the closure and return.
  __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, x5);

  // Link the closure into the optimized function list.
  // x7 : code entry
  // x4 : native context
  // x1 : closure
  __ Ldr(x8,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ Str(closure,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ Mov(x5, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Jump(entry);

  __ Bind(&loop_bottom);
  __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength));
  __ Cmp(index, Operand(1));
  __ B(gt, &loop_top);

  // We found no code.
  __ Bind(&try_shared);
  __ Ldr(entry,
         FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ Ldrb(temp, FieldMemOperand(
                    entry, SharedFunctionInfo::kMarkedForTierUpByteOffset));
  __ TestAndBranchIfAnySet(
      temp, 1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte,
      &gotta_call_runtime);

  // If SFI points to anything other than CompileLazy, install that.
  __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ Move(temp, masm->CodeObject());
  __ Cmp(entry, temp);
  __ B(eq, &gotta_call_runtime);

  // Install the SFI's code entry.
  __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, x5);
  __ Jump(entry);

  __ Bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1484
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  // Tail-call into the runtime to compile the function with baseline code.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
1488
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  // Tail-call into the runtime for non-concurrent optimizing compilation.
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1493
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  // Tail-call into the runtime for concurrent optimizing compilation.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1497
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : argument count (preserved for callee)
  //  -- x1 : target function (preserved for callee)
  //  -- x3 : new target (preserved for callee)
  // -----------------------------------
  // (Register assignment matches Generate_CompileLazy above: x1 is pushed
  //  again below as the function parameter for the runtime call, and the
  //  failure path tail-calls CompileLazy, which expects x1 = target.)
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ Move(x4, x0);
    // Push a copy of the target function and the new target.
    __ SmiTag(x0);
    // Push another copy as a parameter to the runtime call.
    __ Push(x0, x1, x3, x1);

    // Copy arguments from caller (stdlib, foreign, heap).
    // Each iteration j handles the case of exactly j caller arguments: copy
    // them (in reverse) and pad the remaining 3 - j slots with undefined.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(x4, Operand(j));
        __ B(ne, &over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ ldr(x4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                      i * kPointerSize));
        __ push(x4);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(x0, &failed);

    // Drop the saved target function and new target; recover the Smi argc.
    __ Drop(2);
    __ pop(x4);
    __ SmiUntag(x4);
    scope.GenerateLeaveFrame();

    // Drop the arguments plus the receiver from the caller's frame.
    __ add(x4, x4, Operand(1));
    __ Drop(x4);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(x3, x1, x0);
    __ SmiUntag(x0);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1559
// Calls the C function that resets a code object's age, preserving the
// registers that are live at a code-age patch site, then resumes execution
// at the address in x0.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x3, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}
1588
// Define Builtins::Generate_Make<Age>CodeYoungAgain for every code age in
// CODE_AGE_LIST; each simply delegates to the common generator above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
  void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
    GenerateMakeCodeYoungAgainCommon(masm);                               \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1595
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   x3 - new target
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, x3, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
    __ Pop(lr, fp, x3, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}
1627
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  // Marking as executed twice is the same operation as making the code young.
  GenerateMakeCodeYoungAgainCommon(masm);
}
1631
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  // Shares the implementation of the executed-once builtin.
  Generate_MarkCodeAsExecutedOnce(masm);
}
1635
// Common implementation for the NotifyStubFailure builtins: calls the
// kNotifyStubFailure runtime function with all safepoint registers preserved,
// drops the state word pushed by the deoptimizer, and returns to the miss
// handler whose address the deoptimizer placed in lr.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}
1660
// Stub-failure notification without saving floating-point registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1664
// Stub-failure notification that also preserves floating-point registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1668
// Common implementation for the NotifyDeoptimized builtins: informs the
// runtime of the deoptimization, then unwinds according to the full-codegen
// bailout state left on the stack by the deoptimizer (NO_REGISTERS: just the
// state word; TOS_REGISTER: state word plus the saved top-of-stack value,
// which is reloaded into x0).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
                      ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  // The saved accumulator value sits one slot above the state word.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state,
                      static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
                      ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  // Any other state value is a bug: abort.
  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}
1705
// Notification for an eager deoptimization.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1709
// Notification for a lazy deoptimization.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1713
// Notification for a soft deoptimization.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1717
1718static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1719                                    Register function_template_info,
1720                                    Register scratch0, Register scratch1,
1721                                    Register scratch2,
1722                                    Label* receiver_check_failed) {
1723  Register signature = scratch0;
1724  Register map = scratch1;
1725  Register constructor = scratch2;
1726
1727  // If there is no signature, return the holder.
1728  __ Ldr(signature, FieldMemOperand(function_template_info,
1729                                    FunctionTemplateInfo::kSignatureOffset));
1730  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
1731  Label receiver_check_passed;
1732  __ B(eq, &receiver_check_passed);
1733
1734  // Walk the prototype chain.
1735  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1736  Label prototype_loop_start;
1737  __ Bind(&prototype_loop_start);
1738
1739  // Get the constructor, if any
1740  __ GetMapConstructor(constructor, map, x16, x16);
1741  __ cmp(x16, Operand(JS_FUNCTION_TYPE));
1742  Label next_prototype;
1743  __ B(ne, &next_prototype);
1744  Register type = constructor;
1745  __ Ldr(type,
1746         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1747  __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1748
1749  // Loop through the chain of inheriting function templates.
1750  Label function_template_loop;
1751  __ Bind(&function_template_loop);
1752
1753  // If the signatures match, we have a compatible receiver.
1754  __ Cmp(signature, type);
1755  __ B(eq, &receiver_check_passed);
1756
1757  // If the current type is not a FunctionTemplateInfo, load the next prototype
1758  // in the chain.
1759  __ JumpIfSmi(type, &next_prototype);
1760  __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE);
1761  __ B(ne, &next_prototype);
1762
1763  // Otherwise load the parent function template and iterate.
1764  __ Ldr(type,
1765         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1766  __ B(&function_template_loop);
1767
1768  // Load the next prototype.
1769  __ Bind(&next_prototype);
1770  __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset));
1771  __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask));
1772  __ B(eq, receiver_check_failed);
1773  __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1774  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1775  // Iterate.
1776  __ B(&prototype_loop_start);
1777
1778  __ Bind(&receiver_check_passed);
1779}
1780
1781void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1782  // ----------- S t a t e -------------
1783  //  -- x0                 : number of arguments excluding receiver
1784  //  -- x1                 : callee
1785  //  -- lr                 : return address
1786  //  -- sp[0]              : last argument
1787  //  -- ...
1788  //  -- sp[8 * (argc - 1)] : first argument
1789  //  -- sp[8 * argc]       : receiver
1790  // -----------------------------------
1791
1792  // Load the FunctionTemplateInfo.
1793  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1794  __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
1795
1796  // Do the compatible receiver check.
1797  Label receiver_check_failed;
1798  __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
1799  CompatibleReceiverCheck(masm, x2, x3, x4, x5, x6, &receiver_check_failed);
1800
1801  // Get the callback offset from the FunctionTemplateInfo, and jump to the
1802  // beginning of the code.
1803  __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset));
1804  __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset));
1805  __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
1806  __ Jump(x4);
1807
1808  // Compatible receiver check failed: throw an Illegal Invocation exception.
1809  __ Bind(&receiver_check_failed);
1810  // Drop the arguments (including the receiver)
1811  __ add(x0, x0, Operand(1));
1812  __ Drop(x0);
1813  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1814}
1815
// Common implementation for the OnStackReplacement builtins: asks the runtime
// to compile an OSR replacement for the function in the current JavaScript
// frame, then "returns" into the OSR entry point of the new code (located via
// the code object's deoptimization data). If no code is produced, simply
// returns to the caller.
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    // A handler (stub) frame sits on top; the JavaScript frame is one below.
    __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ CompareAndBranch(x0, Smi::kZero, ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Drop any potential handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ LeaveFrame(StackFrame::STUB);
  }

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(
                   x1, FixedArray::OffsetOfElementAt(
                           DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1864
// OSR entry from full-codegen: no handler frame on top.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}
1868
// OSR entry from the interpreter: a handler frame sits above the JS frame.
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}
1872
// static
// Implements Function.prototype.apply: normalizes the stack into
// (receiver in x1, argArray in x0, thisArg pushed), checks that the receiver
// is callable, and dispatches to the Apply builtin (or to Call with zero
// arguments when argArray is null/undefined).
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : argArray (if argc == 2)
  //  -- jssp[8]  : thisArg  (if argc >= 1)
  //  -- jssp[16] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  // Note: argc and arg_array alias in x0; arg_array only becomes live once
  // argc has been consumed by the Pop below.
  Register argc = x0;
  Register arg_array = x0;
  Register receiver = x1;
  Register this_arg = x2;
  Register undefined_value = x3;
  Register null_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);

  // 1. Load receiver into x1, argArray into x0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(2);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : argArray (dummy value if argc <= 1)
    //  -- jssp[8]  : thisArg  (dummy value if argc == 0)
    //  -- jssp[16] : receiver
    // -----------------------------------
    __ Cmp(argc, 1);
    __ Pop(arg_array, this_arg);               // Overwrites argc.
    __ CmovX(this_arg, undefined_value, lo);   // undefined if argc == 0.
    __ CmovX(arg_array, undefined_value, ls);  // undefined if argc <= 1.

    // Replace the receiver slot with thisArg; the receiver moves to x1.
    __ Peek(receiver, 0);
    __ Poke(this_arg, 0);
  }

  // ----------- S t a t e -------------
  //  -- x0      : argArray
  //  -- x1      : receiver
  //  -- x3      : undefined root value
  //  -- jssp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(receiver, &receiver_not_callable);
  __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable,
                             &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ Cmp(arg_array, null_value);
  __ Ccmp(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target in x3).
  DCHECK(undefined_value.Is(x3));
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ Bind(&no_arguments);
  {
    __ Mov(x0, 0);
    DCHECK(receiver.Is(x1));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ Bind(&receiver_not_callable);
  {
    __ Poke(receiver, 0);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1959
// static
// Implements Function.prototype.call: makes the receiver (the callable) the
// call target, shifts the remaining arguments down one slot so the original
// first argument becomes the new receiver, and tail-calls the Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    // No arguments: push undefined as the (sole) argument.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    // Copy downwards from the highest slot toward jssp.
    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
2005
2006void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2007  // ----------- S t a t e -------------
2008  //  -- x0       : argc
2009  //  -- jssp[0]  : argumentsList (if argc == 3)
2010  //  -- jssp[8]  : thisArgument  (if argc >= 2)
2011  //  -- jssp[16] : target        (if argc >= 1)
2012  //  -- jssp[24] : receiver
2013  // -----------------------------------
2014  ASM_LOCATION("Builtins::Generate_ReflectApply");
2015
2016  Register argc = x0;
2017  Register arguments_list = x0;
2018  Register target = x1;
2019  Register this_argument = x2;
2020  Register undefined_value = x3;
2021
2022  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
2023
2024  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
2025  // remove all arguments from the stack (including the receiver), and push
2026  // thisArgument (if present) instead.
2027  {
2028    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
2029    // consistent state for a simple pop operation.
2030    __ Claim(3);
2031    __ Drop(argc);
2032
2033    // ----------- S t a t e -------------
2034    //  -- x0       : argc
2035    //  -- jssp[0]  : argumentsList (dummy value if argc <= 2)
2036    //  -- jssp[8]  : thisArgument  (dummy value if argc <= 1)
2037    //  -- jssp[16] : target        (dummy value if argc == 0)
2038    //  -- jssp[24] : receiver
2039    // -----------------------------------
2040    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
2041    __ Pop(arguments_list, this_argument, target);  // Overwrites argc.
2042    __ CmovX(target, undefined_value, eq);          // undefined if argc == 0.
2043    __ Cmp(x10, 2);
2044    __ CmovX(this_argument, undefined_value, lo);   // undefined if argc <= 1.
2045    __ CmovX(arguments_list, undefined_value, ls);  // undefined if argc <= 2.
2046
2047    __ Poke(this_argument, 0);  // Overwrite receiver.
2048  }
2049
2050  // ----------- S t a t e -------------
2051  //  -- x0      : argumentsList
2052  //  -- x1      : target
2053  //  -- jssp[0] : thisArgument
2054  // -----------------------------------
2055
2056  // 2. Make sure the target is actually callable.
2057  Label target_not_callable;
2058  __ JumpIfSmi(target, &target_not_callable);
2059  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
2060  __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
2061  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable);
2062
2063  // 3a. Apply the target to the given argumentsList (passing undefined for
2064  // new.target in x3).
2065  DCHECK(undefined_value.Is(x3));
2066  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2067
2068  // 3b. The target is not callable, throw an appropriate TypeError.
2069  __ Bind(&target_not_callable);
2070  {
2071    __ Poke(target, 0);
2072    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2073  }
2074}
2075
// Implements Reflect.construct: normalizes the stack into (target in x1,
// argumentsList in x0, new.target in x3 — defaulting to target), verifies
// that both target and new.target are constructors, and dispatches to the
// Apply builtin with undefined as the receiver.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0       : argc
  //  -- jssp[0]  : new.target (optional)
  //  -- jssp[8]  : argumentsList
  //  -- jssp[16] : target
  //  -- jssp[24] : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");

  // Note: argc and arguments_list alias in x0; arguments_list only becomes
  // live once argc has been consumed by the Pop below.
  Register argc = x0;
  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;
  Register undefined_value = x4;

  __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);

  // 1. Load target into x1 (if present), argumentsList into x0 (if present),
  // new.target into x3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Claim(3);
    __ Drop(argc);

    // ----------- S t a t e -------------
    //  -- x0       : argc
    //  -- jssp[0]  : new.target    (dummy value if argc <= 2)
    //  -- jssp[8]  : argumentsList (dummy value if argc <= 1)
    //  -- jssp[16] : target        (dummy value if argc == 0)
    //  -- jssp[24] : receiver
    // -----------------------------------
    __ Adds(x10, argc, 0);  // Preserve argc, and set the Z flag if it is zero.
    __ Pop(new_target, arguments_list, target);  // Overwrites argc.
    __ CmovX(target, undefined_value, eq);       // undefined if argc == 0.
    __ Cmp(x10, 2);
    __ CmovX(arguments_list, undefined_value, lo);  // undefined if argc <= 1.
    __ CmovX(new_target, target, ls);               // target if argc <= 2.

    __ Poke(undefined_value, 0);  // Overwrite receiver.
  }

  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target
  //  -- jssp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  // NOTE(review): these Ldrb calls use the X-sized register x10 where the
  // sibling Generate_FunctionPrototypeApply uses w10 — presumably equivalent
  // for a zero-extending byte load, but worth confirming/harmonizing.
  Label target_not_constructor;
  __ JumpIfSmi(target, &target_not_constructor);
  __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(new_target, &new_target_not_constructor);
  __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset));
  __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor,
                             &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ Bind(&target_not_constructor);
  {
    __ Poke(target, 0);
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ Bind(&new_target_not_constructor);
  {
    __ Poke(new_target, 0);
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }
}
2161
// Builds an arguments adaptor frame: pushes lr/fp, the ARGUMENTS_ADAPTOR
// frame marker, the function (x1) and the smi-tagged argument count (from
// x0), then points fp at the fixed part of the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
2170
// Tears down an arguments adaptor frame built by EnterArgumentsAdaptorFrame,
// dropping the adapted arguments and the receiver.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  // Drop the smi-tagged argument count worth of slots, plus the receiver.
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}
2184
// static
// Core of apply-like calls: materializes argumentsList as a FixedArray of
// elements (via one of several fast paths, or the runtime as a fallback),
// checks for stack overflow, pushes the elements as stack arguments, and
// dispatches to Call or Construct depending on new.target.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0      : argumentsList
  //  -- x1      : target
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  Register arguments_list = x0;
  Register target = x1;
  Register new_target = x3;

  // args aliases arguments_list (x0); len shares x2 with arguments_list_map
  // below and is only live after the map is no longer needed.
  Register args = x0;
  Register len = x2;

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_holey_array, create_runtime,
        done_create;
    __ JumpIfSmi(arguments_list, &create_runtime);

    // Load native context.
    Register native_context = x4;
    __ Ldr(native_context, NativeContextMemOperand());

    // Load the map of argumentsList.
    Register arguments_list_map = x2;
    __ Ldr(arguments_list_map,
           FieldMemOperand(arguments_list, HeapObject::kMapOffset));

    // Check if argumentsList is an (unmodified) arguments object.
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Ldr(x11, ContextMemOperand(native_context,
                                  Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Cmp(arguments_list_map, x10);
    __ Ccmp(arguments_list_map, x11, ZFlag, ne);
    __ B(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(arguments_list_map, x10, JS_ARRAY_TYPE);
    __ B(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ Bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(target, new_target, arguments_list);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      // The FixedArray result lands in x0 (arguments_list/args).
      __ Pop(new_target, target);
      __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                            FixedArray::kLengthOffset));
    }
    __ B(&done_create);

    // Try to create the list from an arguments object.
    __ Bind(&create_arguments);
    __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list,
                                          JSArgumentsObject::kLengthOffset));
    __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset));
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset));
    // If the reported length and the backing-store length disagree, the
    // arguments object has been modified: fall back to the runtime.
    __ CompareAndBranch(len, x11, ne, &create_runtime);
    __ Mov(args, x10);
    __ B(&done_create);

    // For holey JSArrays we need to check that the array prototype chain
    // protector is intact and our prototype is the Array.prototype actually.
    __ Bind(&create_holey_array);
    //  -- x2 : arguments_list_map
    //  -- x4 : native_context
    Register arguments_list_prototype = x2;
    __ Ldr(arguments_list_prototype,
           FieldMemOperand(arguments_list_map, Map::kPrototypeOffset));
    __ Ldr(x10, ContextMemOperand(native_context,
                                  Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
    __ Cmp(arguments_list_prototype, x10);
    __ B(ne, &create_runtime);
    __ LoadRoot(x10, Heap::kArrayProtectorRootIndex);
    __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, PropertyCell::kValueOffset));
    __ Cmp(x11, Isolate::kProtectorValid);
    __ B(ne, &create_runtime);
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));
    __ B(&done_create);

    // Try to create the list from a JSArray object.
    __ Bind(&create_array);
    __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(x10);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    // Check if it is a holey array, the order of the cmp is important as
    // anything higher than FAST_HOLEY_ELEMENTS will fall back to runtime.
    __ Cmp(x10, FAST_HOLEY_ELEMENTS);
    __ B(hi, &create_runtime);
    // Only FAST_XXX after this point, FAST_HOLEY_XXX are odd values.
    __ Tbnz(x10, 0, &create_holey_array);
    // FAST_SMI_ELEMENTS or FAST_ELEMENTS after this point.
    __ Ldrsw(len,
             UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset));
    __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset));

    __ Bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    __ Sub(x10, masm->StackPointer(), x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0      : args (a FixedArray built from argumentsList)
  //  -- x1      : target
  //  -- x2      : len (number of elements to push from args)
  //  -- x3      : new.target (checked to be constructor or undefined)
  //  -- jssp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, push, loop;
    Register src = x4;

    __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Mov(x0, len);  // The 'len' argument for Call() or Construct().
    __ Cbz(len, &done);
    Register the_hole_value = x11;
    Register undefined_value = x12;
    // We do not use the CompareRoot macro as it would do a LoadRoot behind the
    // scenes and we want to avoid that in a loop.
    __ LoadRoot(the_hole_value, Heap::kTheHoleValueRootIndex);
    __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
    __ Claim(len);
    // Copy elements front-to-back, poking each into its final slot; holes
    // are replaced with undefined (safe here because the holey-array path
    // above verified the array protector).
    __ Bind(&loop);
    __ Sub(len, len, 1);
    __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
    __ Cmp(x10, the_hole_value);
    __ Csel(x10, x10, undefined_value, ne);
    __ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
    __ Cbnz(len, &loop);
    __ Bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- x0              : argument count (len)
  //  -- x1              : target
  //  -- x3              : new.target (checked to be constructor or undefined)
  //  -- jssp[0]         : args[len-1]
  //  -- jssp[8]         : args[len-2]
  //      ...            :  ...
  //  -- jssp[8*(len-2)] : args[1]
  //  -- jssp[8*(len-1)] : args[0]
  // -----------------------------------

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2361
// static
// Forwards the caller frame's actual arguments, starting at index x2 (to
// support rest parameters), onto the stack and tail-calls |code|. Handles
// both plain JavaScript caller frames and arguments adaptor frames.
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
                                           Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- x1    : the target to call (can be any Object)
  //  -- x2    : start index (to support rest parameters)
  //  -- lr    : return address.
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ Ldr(x3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(x4, MemOperand(x3, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(x4, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
  __ B(eq, &arguments_adaptor);
  {
    // No adaptor frame: the argument count is the callee's formal parameter
    // count, and the arguments live in the current frame (x3 = fp).
    __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Ldr(x0, FieldMemOperand(x0, JSFunction::kSharedFunctionInfoOffset));
    __ Ldrsw(x0, FieldMemOperand(
                     x0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ Mov(x3, fp);
  }
  __ B(&arguments_done);
  __ Bind(&arguments_adaptor);
  {
    // Just load the length from ArgumentsAdaptorFrame.
    __ Ldrsw(x0, UntagSmiMemOperand(
                     x3, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ Bind(&arguments_done);

  Label stack_empty, stack_done, stack_overflow;
  // x0 = number of arguments to forward (count minus start index).
  __ Subs(x0, x0, x2);
  __ B(le, &stack_empty);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, x0, x2, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ Add(x3, x3, kPointerSize);
      __ Mov(x2, x0);
      __ bind(&loop);
      {
        __ Ldr(x4, MemOperand(x3, x2, LSL, kPointerSizeLog2));
        __ Push(x4);
        __ Subs(x2, x2, 1);
        __ B(ne, &loop);
      }
    }
  }
  __ B(&stack_done);
  __ Bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ Bind(&stack_empty);
  {
    // We just pass the receiver, which is already on the stack.
    __ Mov(x0, 0);
  }
  __ Bind(&stack_done);

  __ Jump(code, RelocInfo::CODE_TARGET);
}
2427
namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
// args_reg holds f()'s argument count (excluding the receiver); scratch1-3
// are clobbered and must not alias args_reg.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  // The flag is read at run time from an isolate-global byte.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ Mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ Ldrb(scratch1, MemOperand(scratch1));
  __ Cmp(scratch1, Operand(0));
  __ B(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ Ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ Cmp(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
    __ B(ne, &no_interpreter_frame);
    // The current frame is a STUB frame: skip past it to its caller.
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Cmp(scratch3,
         Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ B(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(caller_args_count_reg,
           FieldMemOperand(scratch1,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  __ bind(&formal_parameter_count_loaded);

  // Let the macro-assembler relocate the callee's arguments (and receiver)
  // down over the dropped frame(s).
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace
2517
// static
// Implements [[Call]] for ordinary JSFunctions: rejects class constructors,
// converts the receiver where sloppy-mode semantics require it, optionally
// prepares a tail call, and invokes the function's code.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  ASM_LOCATION("Builtins::Generate_CallFunction");
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(w3, FunctionKind::kClassConstructor
                                   << SharedFunctionInfo::kFunctionKindShift,
                           &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack; it sits at sp + argc * kXRegSize.
      __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
      __ JumpIfSmi(x3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
      __ B(hs, &done_convert);  // JSReceivers need no conversion.
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
        __ Bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(x3);
        }
        __ B(&convert_receiver);
      }
      __ Bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve argc (Smi-tagged), the function, and the context across
        // the call; ToObject takes its argument in x0.
        __ SmiTag(x0);
        __ Push(x0, x1);
        __ Mov(x0, x3);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ Mov(x3, x0);  // x3 <- the converted receiver.
        __ Pop(x1, x0);
        __ SmiUntag(x0);
      }
      // Reload the shared function info; x2 is not preserved by the call.
      __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
      __ Bind(&convert_receiver);
    }
    // Write the (possibly converted) receiver back into its stack slot.
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Load the expected parameter count and dispatch through the generic
  // invoke path (which handles arguments adaption if needed).
  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2628
namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in x1 onto the stack,
// between the receiver and the existing arguments, and bumps the argument
// count in x0 accordingly. Used by both the call and construct paths.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : target (checked to be a JSBoundFunction)
  //  -- x3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into x2 and length of that into x4.
  Label no_bound_arguments;
  __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
  __ Cmp(x4, 0);
  __ B(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : target (checked to be a JSBoundFunction)
    //  -- x2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- x3 : new.target (only in case of [[Construct]])
    //  -- x4 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ Claim(x4);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
      __ B(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ Drop(x4);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ Bind(&done);
    }

    // Relocate arguments down the stack.
    // Moves the x0 arguments plus the receiver into the claimed space:
    // x5 indexes the new (lower) slots, x4 the old ones.
    {
      Label loop, done_loop;
      __ Mov(x5, 0);
      __ Bind(&loop);
      __ Cmp(x5, x0);
      __ B(gt, &done_loop);  // Loop runs x0 + 1 times (includes receiver).
      __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
      __ Add(x4, x4, 1);
      __ Add(x5, x5, 1);
      __ B(&loop);
      __ Bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    // Walks the FixedArray from last element to first, storing each one
    // just above the relocated arguments and bumping x0 (argc) per element.
    {
      Label loop;
      __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
      __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
      __ Bind(&loop);
      __ Sub(x4, x4, 1);
      __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
      __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
      __ Add(x0, x0, 1);
      __ Cmp(x4, 0);
      __ B(gt, &loop);
    }
  }
  __ Bind(&no_bound_arguments);
}

}  // namespace
2705
// static
// Implements [[Call]] for JSBoundFunctions: installs [[BoundThis]] as the
// receiver, pushes the [[BoundArguments]], and tail-calls the Call builtin
// on the [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // Patch the receiver to [[BoundThis]].
  __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
  __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  // The builtin's Code object is loaded from the isolate's builtins table,
  // and we jump past the Code header to its first instruction.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10,
         ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2734
// static
// Generic [[Call]] dispatcher: routes to CallFunction for JSFunctions, to
// CallBoundFunction for bound functions, to the runtime for proxies, to the
// call-as-function delegate for other callables, and throws otherwise.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(x1, &non_callable);
  __ Bind(&non_smi);
  // x4 <- map of target, x5 <- instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);

  __ Cmp(x5, JS_PROXY_TYPE);
  __ B(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, x0, x3, x4, x5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(x1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Add(x0, x0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(x1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2793
// Expands the spread argument sitting on top of the stack into individual
// stack arguments and updates argc (x0). Fast path: an unmodified JSArray
// with fast elements and intact Array/iterator protectors is read directly
// from its elements backing store; otherwise the spread is materialized via
// the Runtime::kSpreadIterableFixed fallback.
static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  Register argc = x0;
  Register constructor = x1;
  Register new_target = x3;

  Register scratch = x2;
  Register scratch2 = x6;

  Register spread = x4;
  Register spread_map = x5;

  // Note: spread_len aliases spread_map (both are x5); the map is dead by
  // the time the length is loaded.
  Register spread_len = x5;

  Label runtime_call, push_args;
  __ Peek(spread, 0);
  __ JumpIfSmi(spread, &runtime_call);
  __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));

  // Check that the spread is an array.
  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
  __ B(ne, &runtime_call);

  // Check that we have the original ArrayPrototype.
  __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
  __ Ldr(scratch2, NativeContextMemOperand());
  __ Ldr(scratch2,
         ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ Cmp(scratch, scratch2);
  __ B(ne, &runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
  __ B(ne, &runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ Ldr(scratch2, NativeContextMemOperand());
  __ Ldr(scratch,
         ContextMemOperand(scratch2,
                           Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
  __ Ldr(scratch2,
         ContextMemOperand(
             scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ Cmp(scratch, scratch2);
  __ B(ne, &runtime_call);

  // For FastPacked kinds, iteration will have the same effect as simply
  // accessing each property in order.
  Label no_protector_check;
  __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(scratch);
  __ Cmp(scratch, FAST_HOLEY_ELEMENTS);
  __ B(hi, &runtime_call);  // Anything beyond the fast kinds goes slow.
  // For non-FastHoley kinds, we can skip the protector check.
  __ Cmp(scratch, FAST_SMI_ELEMENTS);
  __ B(eq, &no_protector_check);
  __ Cmp(scratch, FAST_ELEMENTS);
  __ B(eq, &no_protector_check);
  // Check the ArrayProtector cell.
  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
  __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid));
  __ B(ne, &runtime_call);

  __ Bind(&no_protector_check);
  // Load the FixedArray backing store, but use the length from the array.
  __ Ldrsw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset));
  __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
  __ B(&push_args);

  __ Bind(&runtime_call);
  {
    // Call the builtin for the result of the spread.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argc (Smi-tagged), the target, and new.target across the
    // runtime call; the result is a FixedArray of the spread elements.
    __ SmiTag(argc);
    __ Push(constructor, new_target, argc, spread);
    __ CallRuntime(Runtime::kSpreadIterableFixed);
    __ Mov(spread, x0);
    __ Pop(argc, new_target, constructor);
    __ SmiUntag(argc);
  }

  {
    // Calculate the new nargs including the result of the spread.
    __ Ldrsw(spread_len,
             UntagSmiFieldMemOperand(spread, FixedArray::kLengthOffset));

    __ Bind(&push_args);
    // argc += spread_len - 1. Subtract 1 for the spread itself.
    __ Add(argc, argc, spread_len);
    __ Sub(argc, argc, 1);

    // Pop the spread argument off the stack.
    __ Pop(scratch);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already be
    // overflowed here which will cause scratch to become negative.
    __ Sub(scratch, masm->StackPointer(), scratch);
    // Check if the arguments will overflow the stack.
    __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2));
    __ B(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ Bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ Mov(scratch, 0);
    Label done, push, loop;
    __ Bind(&loop);
    __ Cmp(scratch, spread_len);
    __ B(eq, &done);
    __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2));
    __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
    // Holes read from the backing store are pushed as undefined.
    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(scratch2);
    __ Add(scratch, scratch, Operand(1));
    __ B(&loop);
    __ Bind(&done);
  }
}
2927
// static
// Expands the spread argument on the stack, then tail-calls the generic
// Call builtin with the adjusted argument count.
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  // -----------------------------------

  // CheckSpreadAndPushToStack will push x3 to save it; there is no
  // new.target on the call path, so seed it with undefined.
  __ LoadRoot(x3, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}
2942
// static
// Implements [[Construct]] for JSFunctions by tail-calling the function's
// own construct stub (read from its SharedFunctionInfo).
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(x1);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  // Jump past the Code object header to the stub's first instruction.
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}
2963
// static
// Implements [[Construct]] for JSBoundFunctions: pushes [[BoundArguments]],
// fixes up new.target, and tail-calls Construct on [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSBoundFunction)
  //  -- x3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(x1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ Cmp(x1, x3);
    __ B(ne, &done);
    __ Ldr(x3,
           FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ Bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  // The builtin's Code object is loaded from the isolate's builtins table,
  // and we jump past the Code header to its first instruction.
  __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate()));
  __ Ldr(x11, MemOperand(x10));
  __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x12);
}
2993
// static
// Implements [[Construct]] for JSProxies by deferring entirely to the
// runtime (Runtime::kJSProxyConstruct).
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSProxy)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(x1);
  __ Push(x3);
  // Include the pushed new_target, constructor and the receiver.
  __ Add(x0, x0, 3);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
3012
// static
// Generic [[Construct]] dispatcher: routes to the function, bound-function,
// or proxy construct builtins, to the construct-as-constructor delegate for
// other constructables, and throws for non-constructors.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(x1, &non_constructor);

  // Dispatch based on instance type.
  // x4 <- map of target, x5 <- instance type.
  __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
  __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ Cmp(x5, JS_PROXY_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
3062
// static
// Expands the spread argument on the stack, then tail-calls the generic
// Construct builtin with the adjusted argument count.
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
3075
// static
// Trampoline that forwards an allocation request to the runtime; the
// requested size is Smi-tagged and passed on the stack.
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
  // ----------- S t a t e -------------
  //  -- x1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(x1);
  __ Push(x1);
  // The runtime call below runs without a JS context; install a dummy one.
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
3088
// static
// Trampoline that forwards an allocation request to the runtime with the
// target space encoded as a second (Smi) argument.
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
  // ----------- S t a t e -------------
  //  -- x1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(x1);
  __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(x1, x2);
  // The runtime call below runs without a JS context; install a dummy one.
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
3102
// static
// Trampoline that aborts execution through Runtime::kAbort with the given
// message id (already Smi-tagged by the caller).
void Builtins::Generate_Abort(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_Abort");
  // ----------- S t a t e -------------
  //  -- x1 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  // Avoid emitting a real abort inside the abort path itself.
  MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
  __ Push(x1);
  // The runtime call below runs without a JS context; install a dummy one.
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}
3115
3116void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
3117  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
3118  // ----------- S t a t e -------------
3119  //  -- x0 : actual number of arguments
3120  //  -- x1 : function (passed through to callee)
3121  //  -- x2 : expected number of arguments
3122  //  -- x3 : new target (passed through to callee)
3123  // -----------------------------------
3124
3125  Register argc_actual = x0;    // Excluding the receiver.
3126  Register argc_expected = x2;  // Excluding the receiver.
3127  Register function = x1;
3128  Register code_entry = x10;
3129
3130  Label invoke, dont_adapt_arguments, stack_overflow;
3131
3132  Label enough, too_few;
3133  __ Cmp(argc_actual, argc_expected);
3134  __ B(lt, &too_few);
3135  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
3136  __ B(eq, &dont_adapt_arguments);
3137
3138  {  // Enough parameters: actual >= expected
3139    EnterArgumentsAdaptorFrame(masm);
3140    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);
3141
3142    Register copy_start = x10;
3143    Register copy_end = x11;
3144    Register copy_to = x12;
3145    Register scratch1 = x13, scratch2 = x14;
3146
3147    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
3148
3149    // Adjust for fp, lr, and the receiver.
3150    __ Add(copy_start, fp, 3 * kPointerSize);
3151    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
3152    __ Sub(copy_end, copy_start, scratch2);
3153    __ Sub(copy_end, copy_end, kPointerSize);
3154    __ Mov(copy_to, jssp);
3155
3156    // Claim space for the arguments, the receiver, and one extra slot.
3157    // The extra slot ensures we do not write under jssp. It will be popped
3158    // later.
3159    __ Add(scratch1, scratch2, 2 * kPointerSize);
3160    __ Claim(scratch1, 1);
3161
3162    // Copy the arguments (including the receiver) to the new stack frame.
3163    Label copy_2_by_2;
3164    __ Bind(&copy_2_by_2);
3165    __ Ldp(scratch1, scratch2,
3166           MemOperand(copy_start, -2 * kPointerSize, PreIndex));
3167    __ Stp(scratch1, scratch2,
3168           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3169    __ Cmp(copy_start, copy_end);
3170    __ B(hi, &copy_2_by_2);
3171
3172    // Correct the space allocated for the extra slot.
3173    __ Drop(1);
3174
3175    __ B(&invoke);
3176  }
3177
3178  {  // Too few parameters: Actual < expected
3179    __ Bind(&too_few);
3180
3181    Register copy_from = x10;
3182    Register copy_end = x11;
3183    Register copy_to = x12;
3184    Register scratch1 = x13, scratch2 = x14;
3185
3186    EnterArgumentsAdaptorFrame(masm);
3187    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);
3188
3189    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
3190    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
3191
3192    // Adjust for fp, lr, and the receiver.
3193    __ Add(copy_from, fp, 3 * kPointerSize);
3194    __ Add(copy_from, copy_from, argc_actual);
3195    __ Mov(copy_to, jssp);
3196    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
3197    __ Sub(copy_end, copy_end, argc_actual);
3198
3199    // Claim space for the arguments, the receiver, and one extra slot.
3200    // The extra slot ensures we do not write under jssp. It will be popped
3201    // later.
3202    __ Add(scratch1, scratch2, 2 * kPointerSize);
3203    __ Claim(scratch1, 1);
3204
3205    // Copy the arguments (including the receiver) to the new stack frame.
3206    Label copy_2_by_2;
3207    __ Bind(&copy_2_by_2);
3208    __ Ldp(scratch1, scratch2,
3209           MemOperand(copy_from, -2 * kPointerSize, PreIndex));
3210    __ Stp(scratch1, scratch2,
3211           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3212    __ Cmp(copy_to, copy_end);
3213    __ B(hi, &copy_2_by_2);
3214
3215    __ Mov(copy_to, copy_end);
3216
3217    // Fill the remaining expected arguments with undefined.
3218    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
3219    __ Add(copy_end, jssp, kPointerSize);
3220
3221    Label fill;
3222    __ Bind(&fill);
3223    __ Stp(scratch1, scratch1,
3224           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
3225    __ Cmp(copy_to, copy_end);
3226    __ B(hi, &fill);
3227
3228    // Correct the space allocated for the extra slot.
3229    __ Drop(1);
3230  }
3231
3232  // Arguments have been adapted. Now call the entry point.
3233  __ Bind(&invoke);
3234  __ Mov(argc_actual, argc_expected);
3235  // x0 : expected number of arguments
3236  // x1 : function (passed through to callee)
3237  // x3 : new target (passed through to callee)
3238  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3239  __ Call(code_entry);
3240
3241  // Store offset of return address for deoptimizer.
3242  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
3243
3244  // Exit frame and return.
3245  LeaveArgumentsAdaptorFrame(masm);
3246  __ Ret();
3247
3248  // Call the entry point without adapting the arguments.
3249  __ Bind(&dont_adapt_arguments);
3250  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
3251  __ Jump(code_entry);
3252
3253  __ Bind(&stack_overflow);
3254  {
3255    FrameScope frame(masm, StackFrame::MANUAL);
3256    __ CallRuntime(Runtime::kThrowStackOverflow);
3257    __ Unreachable();
3258  }
3259}
3260
3261#undef __
3262
3263}  // namespace internal
3264}  // namespace v8
3265
3266#endif  // V8_TARGET_ARCH_ARM64
3267