// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ addi(r3, r3, Operand(num_extra_args + 1));

  // Insert extra arguments.
  __ SmiTag(r3);
  __ Push(r3, r4, r6);
  __ SmiUntag(r3);
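  // The stack now holds, from the top: new.target, target, and the smi-tagged
  // argument count, followed by the original arguments and the receiver.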

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
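  // A sketch of the JS semantics implemented below (for kMax):
  //   Math.max()           // -> -Infinity (the default accumulator)
  //   Math.max(1, "2", 3)  // -> 3, "2" is coerced via ToNumber
  //   Math.max(1, NaN, 3)  // -> NaN, yet every argument is still visited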

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r8 and the double value in d1.
  __ LoadRoot(r8, root_index);
  __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));

  // Set up the loop counter in r7; arguments are visited from last to first.
  Label done_loop, loop;
  __ mr(r7, r3);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ subi(r7, r7, Operand(1));
    __ cmpi(r7, Operand::Zero());
    __ blt(&done_loop);

    // Load the next parameter tagged value into r5.
    __ ShiftLeftImm(r5, r7, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r5, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
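      // The raw argument count (r3) and loop index (r7) must be smi-tagged
      // so the GC treats the frame slots as tagged values across the call.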
      __ SmiTag(r3);
      __ SmiTag(r7);
      __ EnterBuiltinFrame(cp, r4, r3);
      __ Push(r7, r8);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r7, r8);
      __ LeaveBuiltinFrame(cp, r4, r3);
      __ SmiUntag(r7);
      __ SmiUntag(r3);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r8);
        __ JumpIfSmi(r8, &done_restore);
        __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r5);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
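    // For kMin 'reg' is d2 (the incoming value): if it is -0 it wins the tie,
    // since -0 < +0. For kMax 'reg' is d1 (the accumulator): if it is -0 it
    // must be replaced by d2 (+0). Hence 'reg' differs per kind.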
    __ TestDoubleIsMinusZero(reg, r9, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r8, r5);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r8, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ addi(r3, r3, Operand(1));
  __ Drop(r3);
  __ mr(r3, r8);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
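  // This is the [[Call]] behavior of the Number constructor, e.g.:
  //   Number()      // -> +0
  //   Number("42")  // -> 42 (a primitive number, not a wrapper object)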

  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r5);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::kZero);
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
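  // This is the [[Construct]] behavior, e.g. new Number("42") yields a
  // JSValue wrapper around the number 42. When new.target differs from the
  // constructor (subclassing), allocation goes through FastNewObject below.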

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
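  // This is the [[Call]] behavior of the String constructor, e.g.:
  //   String()          // -> ""
  //   String(42)        // -> "42"
  //   String(Symbol())  // -> "Symbol()" (a descriptive string, no throw)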

  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r6, r6, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r5);
    __ Drop(1);
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r5);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
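  // This is the [[Construct]] behavior, e.g. new String("a") yields a JSValue
  // wrapper around "a". Unlike the [[Call]] path above, new String(Symbol())
  // throws, since symbols are rejected by ToString.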

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
            RelocInfo::CODE_TARGET);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
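  // The runtime call left a Code object in r5 (copied from r3 above); derive
  // its entry point, which sits just past the Code object header.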
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  Label post_instantiation_deopt_entry;
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
              RelocInfo::CODE_TARGET);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Deoptimizer re-enters stub code here.
    __ bind(&post_instantiation_deopt_entry);

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
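    // ctr holds the argument count: bdnz decrements it and loops, while ip
    // steps the copy offset down one pointer per iteration.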
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      ParameterCount actual(r3);
      __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
          masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
  // Store offset of trampoline address for deoptimizer. This is the bailout
  // point after the receiver instantiation but before the function invocation.
  // We need to restore some registers in order to continue the above code.
  if (create_implicit_receiver && !is_api_function) {
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());

    // ----------- S t a t e -------------
    //  -- r3    : newly allocated object
    //  -- sp[0] : constructor function
    // -----------------------------------

    __ pop(r4);
    __ Push(r3, r3);

    // Retrieve smi-tagged arguments count from the stack.
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r3);

    // Retrieve the new target value from the stack. This was placed into the
    // frame description in place of the receiver by the optimizing compiler.
    __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset));
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r6, MemOperand(r6, ip));

    // Continue with constructor function invocation.
    __ b(&post_instantiation_deopt_entry);
  }
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the value to pass to the generator
  //  -- r4 : the JSGeneratorObject to resume
  //  -- r5 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0);

  // Load suspended function and context.
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ mov(ip, Operand(debug_hook));
  __ LoadByte(ip, MemOperand(ip), r0);
  __ extsb(ip, ip);
  __ CmpSmiLiteral(ip, Smi::kZero, r0);
  __ bne(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ cmp(ip, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r5    : the resume mode (tagged)
  //  -- r7    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r3, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
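  // On PPC64 the parameter count is loaded as a raw integer, while on 32-bit
  // targets it arrives as a Smi; hence the #if split just below.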
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_PPC64
    __ cmpi(r3, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r3, SetRC);
    __ beq(&done_loop, cr0);
#endif
    __ mtctr(r3);
    __ bind(&loop);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
    __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5, r7);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Set up new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ TestIfSmi(debug_info, r0);
  __ beq(&array_done, cr0);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ bind(&array_done);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ LoadP(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
  __ mov(ip, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ cmp(r3, ip);
  __ bne(&switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kFeedbackVectorOffset));
  __ LoadP(r7, FieldMemOperand(r7, Cell::kValueOffset));
  __ LoadP(r8, FieldMemOperand(
                   r7, FeedbackVector::kInvocationCountIndex * kPointerSize +
                           FeedbackVector::kHeaderSize));
  __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
  __ StoreP(r8, FieldMemOperand(
                    r7, FeedbackVector::kInvocationCountIndex * kPointerSize +
                            FeedbackVector::kHeaderSize),
            r0);

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov(r8, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ StoreByte(r8, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                   BytecodeArray::kBytecodeAgeOffset),
               r0);

  // Load initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
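  // The offset is relative to the tagged BytecodeArray pointer, so the first
  // bytecode lives at kHeaderSize - kHeapObjectTag.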

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset));
  __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r4, r7, r8);
  __ JumpToJSEntry(r7);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
  __ cmp(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch,
                                         Label* stack_overflow) {
  // A stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
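  // LoadPU is a load-with-update: each iteration pre-decrements 'index' by
  // one pointer before loading, which is why the start address is biased up.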
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments. r5 and r7 will be modified.
  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);

  // Call the target.
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r6 : new target
  // -- r4 : constructor to call
  // -- r5 : allocation site feedback if available, undefined otherwise.
  // -- r7 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  // Push the arguments. r7 and r8 will be modified.
  Generate_InterpreterPushArgs(masm, r3, r7, r3, r8, &stack_overflow);
  __ bind(&skip);

  __ AssertUndefinedOrAllocationSite(r5, r8);
  if (mode == InterpreterPushArgsMode::kJSFunction) {
    __ AssertFunction(r4);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
    // Jump to the construct function.
    __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(ip);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with r3, r4, and r6 unmodified.
    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with r3, r4, and r6 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r4 : target to call verified to be Array function
  // -- r5 : allocation site feedback if available, undefined otherwise.
  // -- r6 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ addi(r7, r3, Operand(1));  // Add one for receiver.

  // Push the arguments. r6 and r8 will be modified.
  Generate_InterpreterPushArgs(masm, r7, r6, r7, r8, &stack_overflow);

  // Array constructor expects constructor in r6. It is the same as r4 here.
  __ mr(r6, r4);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
  __ mtlr(r0);
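  // Installing this address in lr makes the dispatched bytecode handler
  // return into InterpreterEntryTrampoline, right after its dispatch call.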

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ LoadP(r4, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(r5,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister, r4, r5);
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(r5, r3);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  __ StoreP(r5,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r4;
  Register map = r9;
  Register index = r5;

  // Do we have a valid feedback vector?
  __ LoadP(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
  __ LoadP(index, FieldMemOperand(index, Cell::kValueOffset));
  __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);

  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&try_shared);
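  // The optimized code map appears to be scanned as (context, code) weak
  // cell pairs; a length below one full entry means nothing is cached yet.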

  // r10 : native context
  // r5  : length / index
  // r9  : optimized code map
  // r6  : new target
  // r4  : closure
  Register native_context = r10;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r11;
  Register array_pointer = r8;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ add(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ bne(&loop_bottom);

  // Code available?
  Register entry = r7;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found code. Get it into the closure and return.
  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);

  // Link the closure into the optimized function list.
  // r7 : code entry
  // r10: native context
  // r4 : closure
  __ LoadP(
      r8, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ mr(r8, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found no code.
  __ b(&gotta_call_runtime);

  __ bind(&try_shared);
  __ LoadP(entry,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ lbz(r8, FieldMemOperand(entry,
                             SharedFunctionInfo::kMarkedForTierUpByteOffset));
  __ TestBit(r8, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0);
  __ bne(&gotta_call_runtime, cr0);

  // If the SFI points to anything other than CompileLazy, install that.
  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ mov(r8, Operand(masm->CodeObject()));
  __ cmp(entry, r8);
  __ beq(&gotta_call_runtime);

  // Install the SFI's code entry.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);
  __ JumpToJSEntry(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
1524  //  -- r4 : new target (preserved for callee)
1525  //  -- r6 : target function (preserved for callee)
1526  // -----------------------------------
1527  Label failed;
1528  {
1529    FrameScope scope(masm, StackFrame::INTERNAL);
1530    // Preserve argument count for later compare.
1531    __ Move(r7, r3);
1532    // Push a copy of the target function and the new target.
1533    // Push function as parameter to the runtime call.
1534    __ SmiTag(r3);
1535    __ Push(r3, r4, r6, r4);
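
    // The stack now holds, from the top: the target (the parameter for the
    // runtime call), the new target, the target again, and the smi-tagged
    // argument count, matching the Drop/Pop sequences further down.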

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpi(r7, Operand(j));
        __ bne(&over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                        i * kPointerSize));
        __ push(r7);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call the runtime; on success, unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r3, &failed);

    __ Drop(2);
    __ pop(r7);
    __ SmiUntag(r7);
    scope.GenerateLeaveFrame();

    __ addi(r7, r7, Operand(1));
    __ Drop(r7);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  // On failure, tail call back to regular JS.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
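  // Note: the C helper takes two arguments, hence the two parameter slots
  // prepared below: r3 (the address of the code-age sequence, doubling as
  // the return address) and r4 (the isolate).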
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
  void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
    GenerateMakeCodeYoungAgainCommon(masm);                               \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r4);

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime that the stub failed.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
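  // NO_REGISTERS means only the bailout state was pushed; TOS_REGISTER means
  // the value of the top-of-stack (accumulator) register was pushed as well
  // and must be restored into r3 before returning.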
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

// Clobbers registers {r7, r8, r9, r10}.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);
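
  // A receiver is compatible if one of the maps on its (hidden) prototype
  // chain has a constructor whose FunctionTemplateInfo (or one of its parent
  // templates) matches the signature.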

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);
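
  // Note that r11 still holds argc << kPointerSizeLog2 here; it is reused
  // below to drop the arguments if the receiver check fails.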

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Look up the function in the JavaScript frame.
  if (has_handler_frame) {
    __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }
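
  // On success the runtime call returns the optimized code object; on
  // failure it returns Smi zero, which is the "null" checked for below.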

  // If the code object is null, just return to the caller.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::kZero, r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ LeaveFrame(StackFrame::STUB);
  }

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }
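
  // Because r3 and scratch were preloaded with undefined above, a missing
  // thisArg or argArray naturally defaults to undefined.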

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver).  Adjust argument count to make the original first argument
  //    the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }
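
  // For example, for f.call(x, y) the stack is sp[0] = y, sp[4] = x,
  // sp[8] = f on entry; after the shift it is sp[0] = y, sp[4] = x (the new
  // receiver), and r3 has been adjusted from 2 to 1.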

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the new receiver.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }
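
  // Per Reflect.construct semantics, new.target defaults to the target when
  // the third argument is absent; r6 was seeded accordingly above.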

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowNotConstructor);
  }
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}
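
// Note: the adaptor frame stores, below the saved lr and fp (and constant
// pool pointer if enabled), the frame-type marker, the function and the
// smi-tagged argument count; LeaveArgumentsAdaptorFrame reads the count back
// from that last slot.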

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}

// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_holey_array, create_runtime,
        done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // For holey JSArrays we need to check that the array prototype chain
    // protector is intact and that our prototype is actually Array.prototype.
    __ bind(&create_holey_array);
    __ LoadP(r5, FieldMemOperand(r5, Map::kPrototypeOffset));
    __ LoadP(r7, ContextMemOperand(r7, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
    __ cmp(r5, r7);
    __ bne(&create_runtime);
    __ LoadRoot(r7, Heap::kArrayProtectorRootIndex);
    __ LoadP(r5, FieldMemOperand(r7, PropertyCell::kValueOffset));
    __ CmpSmiLiteral(r5, Smi::FromInt(Isolate::kProtectorValid), r0);
    __ bne(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    // -- r5 and r7 must be preserved till bne create_holey_array.
    __ bind(&create_array);
    __ lbz(r8, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r8);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    __ cmpi(r8, Operand(FAST_HOLEY_ELEMENTS));
    __ bgt(&create_runtime);
    // Only FAST_XXX after this point, FAST_HOLEY_XXX are odd values.
    __ TestBit(r8, Map::kHasNonInstancePrototype, r0);
    __ bne(&create_holey_array, cr0);
    // FAST_SMI_ELEMENTS or FAST_ELEMENTS after this point.
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    Label loop, no_args, skip;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(ip, MemOperand(r3, kPointerSize));
    __ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
    __ bne(&skip);
    __ mr(ip, r9);
    __ bind(&skip);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }
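
  // For holey arrays, the protector checks above ruled out observable
  // prototype lookups, so replacing holes with undefined here matches
  // iteration semantics.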

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
                                           Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- r4    : the target to call (can be any Object)
  //  -- r5    : start index (to support rest parameters)
  //  -- lr    : return address.
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ beq(&arguments_adaptor);
  {
    __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
    __ LoadWordArith(
        r3,
        FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mr(r6, fp);
  }
  __ b(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Load the length from the ArgumentsAdaptorFrame.
    __ LoadP(r3, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);
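
  // The number of arguments to forward is the caller's argument count minus
  // the start index in r5, so rest parameters skip over the named parameters.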

  Label stack_empty, stack_done, stack_overflow;
  __ SmiUntag(r3);
  __ sub(r3, r3, r5);
  __ cmpi(r3, Operand::Zero());
  __ ble(&stack_empty);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r3, r5, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addi(r6, r6, Operand(kPointerSize));
      __ mr(r5, r3);
      __ bind(&loop);
      {
        __ ShiftLeftImm(ip, r5, Operand(kPointerSizeLog2));
        __ LoadPX(ip, MemOperand(r6, ip));
        __ push(ip);
        __ subi(r5, r5, Operand(1));
        __ cmpi(r5, Operand::Zero());
        __ bne(&loop);
      }
    }
  }
  __ b(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_empty);
  {
    // We just pass the receiver, which is already on the stack.
    __ mov(r3, Operand::Zero());
  }
  __ bind(&stack_done);

  __ Jump(code, RelocInfo::CODE_TARGET);
}

namespace {

// Drops the top JavaScript frame and an arguments adaptor frame below it (if
// present), preserving all the arguments prepared for the current call.
// Does nothing if the debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ beq(&done);
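
  // From here on, tail call elimination is enabled: tear down the current
  // (and any adaptor) frame so that the callee reuses the caller's stack
  // slots instead of growing the stack.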

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ cmpi(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmpi(scratch3,
          Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mr(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      caller_args_count_reg,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, FunctionKind::kClassConstructor
                         << SharedFunctionInfo::kFunctionKindShift,
                 r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
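  // Strict mode and native functions take the receiver as-is; only sloppy
  // mode non-native functions fall through into the conversion code below.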
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : target (checked to be a JSBoundFunction)
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r6 : new.target (only in case of [[Construct]])
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }
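
    // At this point r8 equals argc * kPointerSize, the offset just past the
    // relocated arguments; the [[BoundArguments]] are copied into the gap
    // between them and the receiver.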

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
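
  // Anything else must carry a [[Call]] internal method to be callable at
  // all; proxies fall back to the runtime, and any other callable object is
  // routed through the CallFunction delegate below.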

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
  Register argc = r3;
  Register constructor = r4;
  Register new_target = r6;

  Register scratch = r5;
  Register scratch2 = r9;

  Register spread = r7;
  Register spread_map = r8;
  Register spread_len = r8;
  Label runtime_call, push_args;
  __ LoadP(spread, MemOperand(sp, 0));
  __ JumpIfSmi(spread, &runtime_call);
  __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));

  // Check that the spread is an array.
  __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE);
  __ bne(&runtime_call);

  // Check that we have the original ArrayPrototype.
  __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
  __ LoadP(scratch2, NativeContextMemOperand());
  __ LoadP(scratch2,
           ContextMemOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
  __ cmp(scratch, scratch2);
  __ bne(&runtime_call);

  // Check that the ArrayPrototype hasn't been modified in a way that would
  // affect iteration.
  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
  __ bne(&runtime_call);

  // Check that the map of the initial array iterator hasn't changed.
  __ LoadP(scratch2, NativeContextMemOperand());
  __ LoadP(scratch,
           ContextMemOperand(scratch2,
                             Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
  __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
  __ LoadP(scratch2,
           ContextMemOperand(
               scratch2, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
  __ cmp(scratch, scratch2);
  __ bne(&runtime_call);
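
  // Together, the checks above guarantee that iterating the spread cannot
  // observably run user code, so its elements can be copied straight off the
  // backing store below.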
2838
2839  // For FastPacked kinds, iteration will have the same effect as simply
2840  // accessing each property in order.
2841  Label no_protector_check;
2842  __ lbz(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2843  __ DecodeField<Map::ElementsKindBits>(scratch);
2844  __ cmpi(scratch, Operand(FAST_HOLEY_ELEMENTS));
2845  __ bgt(&runtime_call);
2846  // For non-FastHoley kinds, we can skip the protector check.
2847  __ cmpi(scratch, Operand(FAST_SMI_ELEMENTS));
2848  __ beq(&no_protector_check);
2849  __ cmpi(scratch, Operand(FAST_ELEMENTS));
2850  __ beq(&no_protector_check);
2851  // Check the ArrayProtector cell.
2852  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2853  __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2854  __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0);
2855  __ bne(&runtime_call);
2856
2857  __ bind(&no_protector_check);
2858  // Load the FixedArray backing store, but use the length from the array.
2859  __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
2860  __ SmiUntag(spread_len);
2861  __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2862  __ b(&push_args);
2863
2864  __ bind(&runtime_call);
2865  {
2866    // Call the builtin for the result of the spread.
2867    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2868    __ SmiTag(argc);
2869    __ Push(constructor, new_target, argc, spread);
2870    __ CallRuntime(Runtime::kSpreadIterableFixed);
2871    __ mr(spread, r3);
2872    __ Pop(constructor, new_target, argc);
2873    __ SmiUntag(argc);
2874  }
2875
2876  {
2877    // Calculate the new nargs including the result of the spread.
2878    __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
2879    __ SmiUntag(spread_len);
2880
2881    __ bind(&push_args);
2882    // argc += spread_len - 1. Subtract 1 for the spread itself.
2883    __ add(argc, argc, spread_len);
2884    __ subi(argc, argc, Operand(1));

    // Pop the spread argument off the stack.
    __ Pop(scratch);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
    // Make scratch the space we have left. The stack might already be
    // overflowed here which will cause scratch to become negative.
    __ sub(scratch, sp, scratch);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, spread_len, Operand(kPointerSizeLog2));
    __ cmp(scratch, r0);
    __ bgt(&done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // Put the evaluated spread onto the stack as additional arguments.
  {
    __ li(scratch, Operand::Zero());
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(scratch, spread_len);
    __ beq(&done);
    __ ShiftLeftImm(r0, scratch, Operand(kPointerSizeLog2));
    __ add(scratch2, spread, r0);
    __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
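    // A FastHoley backing store can contain the_hole; the protector checks
    // above make it safe to materialize such holes as undefined here.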
    __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
    __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
    __ bind(&push);
    __ Push(scratch2);
    __ addi(scratch, scratch, Operand(1));
    __ b(&loop);
    __ bind(&done);
  }
}

// static
void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object)
  // -----------------------------------

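  // This builtin implements calls of the form f(x, ...iterable): the spread
  // is expanded onto the stack here and the generic Call builtin performs
  // the actual call.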
  // CheckSpreadAndPushToStack will push r6 to save it.
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            TailCallMode::kDisallow),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);
  // The calling convention for function-specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);
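  // For example, `new bound()` enters here with new.target == bound and the
  // patch above replaces it with the bound target, while
  // Reflect.construct(bound, [], Other) keeps new.target == Other, matching
  // the [[Construct]] behaviour specified for bound function exotic objects.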

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSProxy)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
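    // The delegate is expected to retrieve the target from the receiver slot,
    // so no dedicated register needs to carry it.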
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ Push(r4);
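  // Runtime calls expect a context; Smi::kZero marks "no context" for
  // builtins like this one that run without a function context.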
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
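  // The target space is Smi-encoded so that the generic
  // Runtime::kAllocateInTargetSpace entry can decode it on the runtime side.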
  __ Push(r4, r5);
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  __ push(r4);
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
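  // For example, a function declared as f(a, b, c) called as f(1, 2, 3, 4)
  // takes the "enough" path and the surplus argument is not copied into the
  // adapted frame, while f(1) takes the "too few" path and the missing
  // arguments are padded with undefined.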

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate the copy start address into r3 and the copy end address
    // into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

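    // The loop below copies expected + 1 slots (the receiver plus the first
    // `expected` arguments); any surplus actual arguments stay behind in the
    // caller's frame.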
    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate the copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
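  // The deoptimizer uses this offset to locate the return address inside the
  // adaptor when it materializes arguments adaptor frames.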

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}

#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC