// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ Ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ Ldr(result, ContextMemOperand(result,
                                   Context::INTERNAL_ARRAY_FUNCTION_INDEX));
}


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (argc == x0)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
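  // For example, a call that reaches this adaptor with two JS arguments and
  // extra_args == NEEDS_CALLED_FUNCTION pushes x1 below and bumps x0 from
  // 2 to 4: the two arguments, the pushed function, and the receiver.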

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ Push(x1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructCode");
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11);

  Register argc = x0;
  Register function = x1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10);
    __ Cmp(function, x10);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into x0 and get rid of the rest.
  Label no_arguments;
  __ Cbz(argc, &no_arguments);
  // The first argument is at sp[(argc - 1) * 8].
  __ Sub(argc, argc, 1);
  __ Drop(argc, kXRegSize);
  // jssp now points to args[0]; load and drop args[0] and the receiver.
  Register arg = argc;
  __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  argc = NoReg;

  Register argument = x2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(arg,        // Input.
                             argument,   // Result.
                             x10,        // Scratch.
                             x11,        // Scratch.
                             x12,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11);
  __ Bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- x2     : argument converted to string
  //  -- x1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  Register new_obj = x0;
  __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = x3;
  __ LoadGlobalFunctionInitialMap(function, map, x10);
  if (FLAG_debug_code) {
    __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2);
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Cmp(x4, 0);
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset));

  Register empty = x3;
  __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset));

  __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize));
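  // The map, properties, elements and value fields written above are exactly
  // the four pointer-sized fields of a JSValue, so no filler is needed.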

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ Bind(&not_cached);
  __ JumpIfSmi(arg, &convert_argument);

  // Is it a String?
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
  __ Mov(argument, arg);
  __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
  __ B(&argument_is_string);

  // Invoke the conversion builtin and put the result into x2.
  __ Bind(&convert_argument);
  __ Push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(arg);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ Pop(function);
  __ Mov(argument, x0);
  __ B(&argument_is_string);

  // Load the empty string into x2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ Bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ B(&argument_is_string);

  // At this point the argument is already a string. Call runtime to create a
  // string wrapper.
  __ Bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  //   - Push a copy of the function onto the stack.
  //   - Push another copy as a parameter to the runtime call.
  __ Push(x1, x1);

  __ CallRuntime(function_id, 1);

  //   - Restore the function.
  __ Pop(x1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack. The allocation site is
    // only pushed when a memento is to be created.
    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(x2, x10);
      __ Push(x2);
    }

    Register argc = x0;
    Register constructor = x1;
    // x1: constructor function
    __ SmiTag(argc);
    __ Push(argc, constructor);
    // sp[0]: constructor function
    // sp[1]: number of arguments (smi-tagged)

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);
      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      __ Ldr(init_map,
             FieldMemOperand(constructor,
                             JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
      __ B(eq, &rt_call);

      Register construction_count = x14;
      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 =
            FieldMemOperand(init_map, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ Ldr(x4, bit_field3);
        __ DecodeField<Map::ConstructionCount>(construction_count, x4);
        __ Cmp(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ B(eq, &allocate);
        // Decrease generous allocation count.
        __ Subs(x4, x4, Operand(1 << Map::ConstructionCount::kShift));
        __ Str(x4, bit_field3);
        __ Cmp(construction_count, Operand(JSFunction::kFinishSlackTracking));
        __ B(ne, &allocate);
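        // The construction counter has reached kFinishSlackTracking: ask the
        // runtime to shrink the initial map to the number of slots actually
        // used before this allocation proceeds.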

        // Push the constructor and map to the stack, and the constructor again
        // as argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Mov(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ Bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      Register obj_size = x3;
      Register new_obj = x4;
      __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Add(x7, obj_size,
               Operand(AllocationMemento::kSize / kPointerSize));
        __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      } else {
        __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      }

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // NB. the object pointer is not tagged, so MemOperand is used.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset));
      STATIC_ASSERT(JSObject::kElementsOffset ==
          (JSObject::kPropertiesOffset + kPointerSize));
      __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset));

      Register first_prop = x5;
      __ Add(first_prop, new_obj, JSObject::kHeaderSize);

      // Fill all of the in-object properties with the appropriate filler.
      Register filler = x7;
      __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);

      // Obtain number of pre-allocated property fields and in-object
      // properties.
      Register prealloc_fields = x10;
      Register inobject_props = x11;
      Register inst_sizes = x11;
      __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(prealloc_fields, inst_sizes,
              Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ Ubfx(inobject_props, inst_sizes,
              Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte);

      // Calculate number of property fields in the object.
      Register prop_fields = x6;
      __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);
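      // prop_fields is now the instance size in words minus the object
      // header, i.e. the number of in-object property slots.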

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Cmp(construction_count, Operand(JSFunction::kNoSlackTracking));
        __ B(eq, &no_inobject_slack_tracking);
        construction_count = NoReg;

        // Fill the pre-allocated fields with undefined.
        __ FillFields(first_prop, prealloc_fields, filler);

        // Update first_prop register to be the offset of the first field after
        // pre-allocated fields.
        __ Add(first_prop, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        if (FLAG_debug_code) {
          Register obj_end = x14;
          __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
          __ Cmp(first_prop, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with one pointer filler map.
        __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(prop_fields, prop_fields, prealloc_fields);

        __ Bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        // Fill the remaining property fields with the filler value.
        __ FillFields(first_prop, prop_fields, filler);
        __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
        ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        // Load the AllocationSite.
        __ Peek(x14, 2 * kXRegSize);
        ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        first_prop = NoReg;
      } else {
        // Fill all of the remaining property fields with the filler value.
        __ FillFields(first_prop, prop_fields, filler);
        first_prop = NoReg;
        prop_fields = NoReg;
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Add(new_obj, new_obj, kHeapObjectTag);

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not, or fall through to runtime call if it is.
      Register element_count = x3;
      __ Ldrb(element_count,
              FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset));
      // The instance sizes field contains both the number of pre-allocated
      // property fields and the number of in-object properties.
      __ Add(element_count, element_count, prealloc_fields);
      __ Subs(element_count, element_count, inobject_props);

      // Done if no extra properties are to be allocated.
      __ B(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      Register new_array = x5;
      Register array_size = x6;
      __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize);
      __ Allocate(array_size, new_array, x11, x12, &undo_allocation,
                  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP |
                                               SIZE_IN_WORDS));

      Register array_map = x10;
      __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex);
      __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset));
      __ SmiTag(x0, element_count);
      __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset));

      // Initialize the fields to undefined.
      Register elements = x10;
      __ Add(elements, new_array, FixedArray::kHeaderSize);
      __ FillFields(elements, element_count, filler);

      // Store the initialized FixedArray into the properties field of the
      // JSObject.
      __ Add(new_array, new_array, kHeapObjectTag);
      __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      __ B(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      __ Bind(&undo_allocation);
      __ UndoAllocationInNewSpace(new_obj, x14);
    }

    // Allocate the new receiver object using the runtime call.
    __ Bind(&rt_call);
    Label count_incremented;
    if (create_memento) {
      // Get the cell or allocation site.
      __ Peek(x4, 2 * kXRegSize);
      __ Push(x4);
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
      __ Mov(x4, x0);
      // If we ended up using the runtime, and we want a memento, then the
      // runtime call made it for us, and the create count must not be
      // incremented again below.
      __ B(&count_incremented);
    } else {
      __ Push(constructor);  // Argument for Runtime_NewObject.
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
      __ Mov(x4, x0);
    }

    // Receiver for constructor call allocated.
    // x4: JSObject
    __ Bind(&allocated);

    if (create_memento) {
      __ Peek(x10, 2 * kXRegSize);
      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
      // x10 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ Ldr(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Add(x5, x5, Operand(Smi::FromInt(1)));
      __ Str(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Bind(&count_incremented);
    }

    __ Push(x4, x4);

    // Reload the number of arguments from the stack.
    // Set it up in x0 for the function call below.
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    __ Peek(constructor, 2 * kXRegSize);  // Load constructor.
    __ Peek(argc, 3 * kXRegSize);  // Load number of arguments.
    __ SmiUntag(argc);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy two values per iteration so that ldp/stp can be used.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x3.
    __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x3, x2);
    __ B(gt, &loop);
    // Because we copied values two by two we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(x0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ Bind(&use_receiver);
    __ Peek(x0, 0);

    // Remove the receiver from the stack, remove caller arguments, and
    // return.
    __ Bind(&exit);
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Peek(x1, 2 * kXRegSize);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


// Input:
//   x0: code entry.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(x10, argv);
    __ B(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    __ Mov(x0, argc);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(x0);
      __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS internal frame, remove the parameters (except the
    // function), and return.
  }

  // Result is in x0. Return.
  __ Ret();
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve the function. At the same time, push arguments for
  // kHiddenCompileOptimized.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);

  // Restore the function.
  __ Pop(function);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection, which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection, which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across the notification; this is important for
    // compiled stubs that tail call the runtime on deopts, passing their
    // parameters in registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(x1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset.
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  enum {
    call_type_JS_func = 0,
    call_type_func_proxy = 1,
    call_type_non_func = 2
  };
  Register argc = x0;
  Register function = x1;
  Register call_type = x4;
  Register scratch1 = x10;
  Register scratch2 = x11;
  Register receiver_type = x13;

  ASM_LOCATION("Builtins::Generate_FunctionCall");
  // 1. Make sure we have at least one argument.
  { Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }
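  // (With argc == 0 there would be no argument slot for step 4 below to
  // shift into the receiver position, so undefined is pushed as a
  // placeholder argument.)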

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
  __ JumpIfSmi(function, &non_function);
  __ JumpIfNotObjectType(function, scratch1, receiver_type,
                         JS_FUNCTION_TYPE, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Mov(call_type, static_cast<int>(call_type_JS_func));
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions, nor for native
    // functions; the compiler hints loaded below cover both checks.
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAnySet(
        scratch2.W(),
        (1 << SharedFunctionInfo::kStrictModeFunction) |
        (1 << SharedFunctionInfo::kNative),
        &shift_arguments);

    // Compute the receiver in sloppy mode.
    Register receiver = x2;
    __ Sub(scratch1, argc, 1);
    __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2));
    __ JumpIfSmi(receiver, &convert_to_object);

    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, scratch1, scratch2,
                        FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge);

    __ Bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(argc);

      __ Push(argc, receiver);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ Mov(receiver, x0);

      __ Pop(argc);
      __ SmiUntag(argc);

      // Exit the internal frame.
    }

    // Restore the function and flag in the registers.
    __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
    __ Mov(call_type, static_cast<int>(call_type_JS_func));
    __ B(&patch_receiver);

    __ Bind(&use_global_receiver);
    __ Ldr(receiver, GlobalObjectMemOperand());
    __ Ldr(receiver,
           FieldMemOperand(receiver, GlobalObject::kGlobalReceiverOffset));

    __ Bind(&patch_receiver);
    __ Sub(scratch1, argc, 1);
    __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2));

    __ B(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ Bind(&slow);
  __ Mov(call_type, static_cast<int>(call_type_func_proxy));
  __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE);
  __ B(eq, &shift_arguments);
  __ Bind(&non_function);
  __ Mov(call_type, static_cast<int>(call_type_non_func));

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Sub(scratch1, argc, 1);
  __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is
    // jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }
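  // For example, calling a non-function f with f(a, b): the stack [b, a, f]
  // (top first) becomes [b, a], so a is the new receiver, b the only
  // argument, and argc == 1.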

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  { Label js_function, non_proxy;
    __ Cbz(call_type, &js_function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ Mov(x2, 0);
    __ Cmp(call_type, static_cast<int>(call_type_func_proxy));
    __ B(ne, &non_proxy);

    __ Push(function);  // Re-add proxy object as additional argument.
    __ Add(argc, argc, 1);
    __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&non_proxy);
    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ Bind(&js_function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register x3 without checking arguments.
  __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(x2,
           FieldMemOperand(x3,
             SharedFunctionInfo::kFormalParameterCountOffset));
  Label dont_adapt_args;
  __ Cmp(x2, argc);  // Check formal and actual parameter counts.
  __ B(eq, &dont_adapt_args);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ Bind(&dont_adapt_args);

  __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionApply");
  const int kIndexOffset    =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset    =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset     =  2 * kPointerSize;
  const int kReceiverOffset =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;

    // Get the length of the arguments via a builtin call.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgsOffset));
    __ Push(function, args);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    Register argc = x0;

    // Check the stack for overflow.
    // We are not trying to catch interruptions (e.g. debug break and
    // preemption) here, so the "real stack limit" is checked.
    Label enough_stack_space;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    // Make x10 the space we have left. The stack might already be overflowed
    // here, which will cause x10 to become negative.
    // TODO(jbramley): Check that the stack usage here is safe.
    __ Sub(x10, jssp, x10);
    // Check if the arguments will overflow the stack.
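    // (argc is a smi here, so shifting it right by kSmiShift minus
    // kPointerSizeLog2 converts the argument count into the number of bytes
    // the arguments will occupy.)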
    __ Cmp(x10, Operand(argc, LSR, kSmiShift - kPointerSizeLog2));
    __ B(gt, &enough_stack_space);
    // There is not enough stack space, so use a builtin to throw an
    // appropriate error.
    __ Push(function, argc);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // We should never return from the STACK_OVERFLOW builtin.
    if (__ emit_debug_code()) {
      __ Unreachable();
    }

    __ Bind(&enough_stack_space);
    // Push current limit and index.
    __ Mov(x1, 0);  // Initial index.
    __ Push(argc, x1);

    Label push_receiver;
    __ Ldr(receiver, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function; otherwise it must be a proxy,
    // and the function proxy will be invoked later.
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE,
                           &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
    // Load the shared function info.
    __ Ldr(x2, FieldMemOperand(function,
                               JSFunction::kSharedFunctionInfoOffset));

    // Compute and push the receiver.
    // Do not transform the receiver for strict mode functions.
    Label convert_receiver_to_object, use_global_receiver;
    __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
    __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver);
    // Do not transform the receiver for native functions.
    __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(receiver, &convert_receiver_to_object);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_receiver);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE,
                        &push_receiver, ge);

    // Call a builtin to convert the receiver to a regular object.
    __ Bind(&convert_receiver_to_object);
    __ Push(receiver);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Mov(receiver, x0);
    __ B(&push_receiver);

    __ Bind(&use_global_receiver);
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ Bind(&push_receiver);
    __ Push(receiver);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    Register current = x0;
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ B(&entry);

    __ Bind(&loop);
    // Load the current argument from the arguments array and push it.
    // TODO(all): Couldn't we optimize this for JS arrays?

    __ Ldr(x1, MemOperand(fp, kArgsOffset));
    __ Push(x1, current);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ Push(x0);

    // Increment the index and store it back on the frame.
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ Add(current, current, Smi::FromInt(1));
    __ Str(current, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ Bind(&entry);
    __ Ldr(x1, MemOperand(fp, kLimitOffset));
    __ Cmp(current, x1);
    __ B(ne, &loop);

    // At the end of the loop, the number of arguments is stored in 'current',
    // represented as a smi.

    function = x1;  // From now on we want the function to be kept in x1.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(current);
    __ SmiUntag(current);
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy);
    __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    frame_scope.GenerateLeaveFrame();
    __ Drop(3);
    __ Ret();

    // Call the function proxy.
    __ Bind(&call_proxy);
    // x0 : argc
    // x1 : function
    __ Push(function);  // Add function proxy as last argument.
    __ Add(x0, x0, 1);
    __ Mov(x2, 0);
    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
  }
  __ Drop(3);
  __ Ret();
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here, which will cause x10 to become negative.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
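  // The adaptor frame now looks like this (ascending addresses from jssp):
  // number of arguments (smi), function, ARGUMENTS_ADAPTOR marker, saved fp,
  // saved lr. fp points at the saved fp slot.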
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);

  Register argc_actual = x0;  // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, argc_expected);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);   // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ Unreachable();
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64