builtins-arm.cc revision 0d5e116f6aee03185f237311a943491bb079a768
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen-inl.h"
#include "debug.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.

  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// This constant has the same value as JSArray::kPreallocatedArrayElements; if
// JSArray::kPreallocatedArrayElements is changed, the handling of the loop
// unfolding below should be reconsidered.
static const int kLoopUnfoldLimit = 4;


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 int initial_capacity,
                                 Label* gc_required) {
  ASSERT(initial_capacity > 0);
  // Load the initial map from the array function.
  __ ldr(scratch1, FieldMemOperand(array_function,
                                   JSFunction::kPrototypeOrInitialMapOffset));

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize + FixedArray::SizeFor(initial_capacity);
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ mov(scratch3, Operand(0, RelocInfo::NONE));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  ASSERT(kSmiTag == 0);
  __ sub(scratch1, scratch1, Operand(kHeapObjectTag));

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
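  // The post-indexed stores below advance scratch1 through the map and length
  // fields, leaving it pointing at the first element slot before the holes are
  // written.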
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value.
  ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
  ASSERT(initial_capacity <= kLoopUnfoldLimit);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < initial_capacity; i++) {
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  }
}

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true, the allocated elements backing store is
// filled with the hole values; otherwise it is left uninitialized. When the
// backing store is filled, the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  Label not_empty, allocated;

  // Load the initial map from the array function.
  __ ldr(elements_array_storage,
         FieldMemOperand(array_function,
                         JSFunction::kPrototypeOrInitialMapOffset));

  // Check whether an empty sized array is requested.
  __ tst(array_size, array_size);
  __ b(nz, &not_empty);

  // If an empty array is requested, allocate a small elements array anyway.
  // This keeps the code below free of special casing for the empty array.
  int size = JSArray::kSize +
             FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
  __ AllocateInNewSpace(size,
                        result,
                        elements_array_end,
                        scratch1,
                        gc_required,
                        TAG_OBJECT);
  __ jmp(&allocated);

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  __ bind(&not_empty);
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
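  // Compute the allocation size in words: the JSArray plus the FixedArray
  // header, plus one word per element (the ASR untags the smi element count).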
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ bind(&allocated);
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  ASSERT(kSmiTag == 0);
  __ sub(elements_array_storage,
         elements_array_storage,
         Operand(kHeapObjectTag));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  ASSERT(kSmiTag == 0);
  __ tst(array_size, array_size);
  // Length of the FixedArray is the number of pre-allocated elements if
  // the actual JSArray has length 0 and the size of the JSArray for non-empty
  // JSArrays. The length of a FixedArray is stored as a smi.
  __ mov(array_size,
         Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)),
         LeaveCC,
         eq);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
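  // array_size is a smi, so shifting it left by kPointerSizeLog2 - kSmiTagSize
  // scales the element count directly to a byte offset.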
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code, the runtime is called.
// This function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code, so there is no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Label argc_one_or_more, argc_two_or_more;

  // Check for array construction with zero arguments or one.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       JSArray::kPreallocatedArrayElements,
                       call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r3, r4);
  // Set up the return value, remove the receiver from the stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if the argument is not a smi or if it is
  // negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
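  // A single AND against the sign bit and the smi tag mask sets the flags for
  // both checks: the result is non-zero if the argument is negative or not a
  // smi.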
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  ASSERT(kSmiTag == 0);
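  // The limit is shifted into smi form so it can be compared directly against
  // the smi-tagged requested length.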
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r4);
  // Set up the return value, remove the receiver and argument from the stack
  // and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(&Counters::array_function_native, 1, r2, r6);

  // Fill the arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store, filling it backwards. Note:
  // elements_array_end points after the backing store, therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  // Remove the caller arguments and receiver from the stack, set up the return
  // value and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);
  Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
  Handle<Code> array_code(code);
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin Array function
    // which always has a map.
    GenerateLoadArrayFunction(masm, r2);
    __ cmp(r1, r2);
    __ Assert(eq, "Unexpected Array function");
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // TODO(849): implement custom construct stub.
  // Generate a copy of the generic stub for now.
  Generate_JSConstructStubGeneric(masm);
}


void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Label non_function_call;
  // Check that the function is not a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function_call);
  // Check that the function is a JSFunction.
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function_call);

  // Jump to the function-specific construct stub.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));

  // r0: number of arguments
  // r1: called object
  __ bind(&non_function_call);
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0, RelocInfo::NONE));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  __ EnterConstructFrame();

  // Preserve the two incoming parameters on the stack.
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ push(r0);  // Smi-tagged arguments count.
  __ push(r1);  // Constructor function.

  // Try to allocate the object without transitioning into C code. If any of the
  // preconditions is not met, the code bails out to the runtime call.
  Label rt_call, allocated;
  if (FLAG_inline_new) {
    Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address();
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(nz, &rt_call);
#endif

    // Load the initial map and verify that it is in fact a map.
    // r1: constructor function
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &rt_call);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ b(ne, &rt_call);

    // Check that the constructor is not constructing a JSFunction (see comments
    // in Runtime_NewObject in runtime.cc), in which case the initial map's
    // instance type would be JS_FUNCTION_TYPE.
    // r1: constructor function
    // r2: initial map
    __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
    __ b(eq, &rt_call);

    if (count_constructions) {
      Label allocate;
      // Decrease generous allocation count.
      __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      MemOperand constructor_count =
          FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
      __ ldrb(r4, constructor_count);
      __ sub(r4, r4, Operand(1), SetCC);
      __ strb(r4, constructor_count);
      __ b(ne, &allocate);

      __ Push(r1, r2);

      __ push(r1);  // constructor
      // The call will replace the stub, so the countdown is only done once.
      __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

      __ pop(r2);
      __ pop(r1);

      __ bind(&allocate);
    }

    // Now allocate the JSObject on the heap.
    // r1: constructor function
    // r2: initial map
    __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
    __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

    // Allocated the JSObject, now initialize the fields. Map is set to initial
    // map and properties and elements are set to empty fixed array.
    // r1: constructor function
    // r2: initial map
    // r3: object size
    // r4: JSObject (not tagged)
    __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
    __ mov(r5, r4);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
    ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
    __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

    // Fill all the in-object properties with the appropriate filler.
    // r1: constructor function
    // r2: initial map
    // r3: object size (in words)
    // r4: JSObject (not tagged)
    // r5: First in-object property of JSObject (not tagged)
    __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
    { Label loop, entry;
      if (count_constructions) {
        // To allow for truncation.
        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
      } else {
        __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      }
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r5, r6);
      __ b(lt, &loop);
    }

    // Add the object tag to make the JSObject real, so that we can continue and
    // jump into the continuation code at any time from now on. Any failures
    // need to undo the allocation, so that the heap is in a consistent state
    // and verifiable.
    __ add(r4, r4, Operand(kHeapObjectTag));

    // Check if a non-empty properties array is needed. Continue with the
    // allocated object if not; fall through to the runtime call if it is.
    // r1: constructor function
    // r4: JSObject
    // r5: start of next object (not tagged)
    __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
    // The instance sizes field contains both the number of pre-allocated
    // property fields and the number of in-object properties.
    __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
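    // Ubfx extracts one 8-bit byte from the packed instance sizes word: first
    // the pre-allocated property field count, then the in-object property
    // count.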
    __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
    __ add(r3, r3, Operand(r6));
    __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
    __ sub(r3, r3, Operand(r6), SetCC);

    // Done if no extra properties are to be allocated.
    __ b(eq, &allocated);
    __ Assert(pl, "Property allocation count failed.");

    // Scale the number of elements by pointer size and add the header for
    // FixedArrays to the start of the next object calculation from above.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: start of next object
    __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
    __ AllocateInNewSpace(
        r0,
        r5,
        r6,
        r2,
        &undo_allocation,
        static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

    // Initialize the FixedArray.
    // r1: constructor
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
    __ mov(r2, r5);
    ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
    __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
    ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
    __ mov(r0, Operand(r3, LSL, kSmiTagSize));
    __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

    // Initialize the fields to undefined.
    // r1: constructor function
    // r2: First element of FixedArray (not tagged)
    // r3: number of elements in properties array
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
    ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
    { Label loop, entry;
      if (count_constructions) {
        __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      } else if (FLAG_debug_code) {
        __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
        __ cmp(r7, r8);
        __ Assert(eq, "Undefined value not loaded.");
      }
      __ b(&entry);
      __ bind(&loop);
      __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
      __ bind(&entry);
      __ cmp(r2, r6);
      __ b(lt, &loop);
    }

    // Store the initialized FixedArray into the properties field of
    // the JSObject.
    // r1: constructor function
    // r4: JSObject
    // r5: FixedArray (not tagged)
    __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
    __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

    // Continue with the JSObject being successfully allocated.
    // r1: constructor function
    // r4: JSObject
    __ jmp(&allocated);

    // Undo the setting of the new top so that the heap is verifiable. For
    // example, the map's unused properties potentially do not match the
    // allocated object's unused properties.
    // r4: JSObject (previous new top)
    __ bind(&undo_allocation);
    __ UndoAllocationInNewSpace(r4, r5);
  }

  // Allocate the new receiver object using the runtime call.
  // r1: constructor function
  __ bind(&rt_call);
  __ push(r1);  // argument for Runtime_NewObject
  __ CallRuntime(Runtime::kNewObject, 1);
  __ mov(r4, r0);

  // Receiver for constructor call allocated.
  // r4: JSObject
  __ bind(&allocated);
  __ push(r4);

  // Push the function and the allocated receiver from the stack.
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, kPointerSize));
  __ push(r1);  // Constructor function.
  __ push(r4);  // Receiver.

  // Reload the number of arguments from the stack.
  // r1: constructor function
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  __ ldr(r3, MemOperand(sp, 4 * kPointerSize));

  // Set up the pointer to the last argument.
  __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

  // Set up the number of arguments for the function call below.
  __ mov(r0, Operand(r3, LSR, kSmiTagSize));

  // Copy arguments and receiver to the expression stack.
  // r0: number of arguments
  // r2: address of last argument (caller sp)
  // r1: constructor function
  // r3: number of arguments (smi-tagged)
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: receiver
  // sp[3]: constructor function
  // sp[4]: number of arguments (smi-tagged)
  Label loop, entry;
  __ b(&entry);
  __ bind(&loop);
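  // r3 is the smi-tagged argument count; the shift by kPointerSizeLog2 - 1
  // (kSmiTagSize) scales it to a byte offset, and the sub of 2 at the entry
  // label steps the smi count down by one argument per iteration.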
  __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
  __ push(ip);
  __ bind(&entry);
  __ sub(r3, r3, Operand(2), SetCC);
  __ b(ge, &loop);

  // Call the function.
  // r0: number of arguments
  // r1: constructor function
  if (is_api_function) {
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    Handle<Code> code = Handle<Code>(
        Builtins::builtin(Builtins::HandleApiCallConstruct));
    ParameterCount expected(0);
    __ InvokeCode(code, expected, expected,
                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Pop the function from the stack.
  // sp[0]: constructor function
  // sp[1]: receiver (newly allocated object)
  // sp[2]: constructor function
  // sp[3]: number of arguments (smi-tagged)
  __ pop();

  // Restore context from the frame.
  // r0: result
  // sp[0]: receiver
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.
  Label use_receiver, exit;

  // If the result is a smi, it is *not* an object in the ECMA sense.
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &use_receiver);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
  __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
  __ b(ge, &exit);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ ldr(r0, MemOperand(sp));

  // Remove receiver from the stack, remove caller arguments, and
  // return.
  __ bind(&exit);
  // r0: result
  // sp[0]: receiver (newly allocated object)
  // sp[1]: constructor function
  // sp[2]: number of arguments (smi-tagged)
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ LeaveConstructFrame();
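  // r1 holds the smi-tagged argument count; the shift converts it into the
  // byte size of the arguments and the extra kPointerSize drops the receiver.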
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, Operand(0, RelocInfo::NONE));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Set up the context from the function argument.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Set up the roots register.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(r10, Operand(roots_address));

  // Push the function and the receiver onto the stack.
  __ push(r1);
  __ push(r2);

  // Copy arguments to the stack in a loop.
  // r1: function
  // r3: argc
  // r4: argv, i.e. points to first arg
  Label loop, entry;
  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
  // r2 points past last arg.
  __ b(&entry);
  __ bind(&loop);
  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
  __ ldr(r0, MemOperand(r0));  // dereference handle
  __ push(r0);  // push parameter
  __ bind(&entry);
  __ cmp(r4, r2);
  __ b(ne, &loop);

  // Initialize all JavaScript callee-saved registers, since they will be seen
  // by the garbage collector as part of handlers.
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  __ mov(r5, Operand(r4));
  __ mov(r6, Operand(r4));
  __ mov(r7, Operand(r4));
  if (kR9Available == 1) {
    __ mov(r9, Operand(r4));
  }

  // Invoke the code and pass argc as r0.
  __ mov(r0, Operand(r3));
  if (is_construct) {
    __ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
            RelocInfo::CODE_TARGET);
  } else {
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION);
  }

  // Exit the JS frame and remove the parameters (except function), and return.
  // Respect ABI stack constraint.
  __ LeaveInternalFrame();
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ push(r1);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);
  // Calculate the entry point.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ tst(r0, Operand(r0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &non_function);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &convert_to_object);

    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    __ CompareObjectType(r2, r3, r3, FIRST_JS_OBJECT_TYPE);
    __ b(lt, &convert_to_object);
    __ cmp(r3, Operand(LAST_JS_OBJECT_TYPE));
    __ b(le, &shift_arguments);

    __ bind(&convert_to_object);
    __ EnterInternalFrame();  // In order to preserve argument count.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
    __ push(r0);

    __ push(r2);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
    __ mov(r2, r0);

    __ pop(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));
    __ LeaveInternalFrame();
    // Restore the function to r1.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
    __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&non_function);
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));
  // Clear r1 to indicate a non-function being called.
  __ mov(r1, Operand(0, RelocInfo::NONE));

  // 4. Shift arguments one slot down on the stack (overwriting the original
  //    receiver).  Adjust the argument count to make the original first
  //    argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin.
  // r0: actual number of arguments
  // r1: function
  { Label function;
    __ tst(r1, r1);
    __ b(ne, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
                         RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
          RelocInfo::CODE_TARGET, ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;
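  // The offsets are relative to fp: the function, receiver and arguments
  // object are the incoming arguments above the frame (positive offsets),
  // while the index and limit slots are pushed inside the internal frame
  // below fp (negative offsets).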

  __ EnterInternalFrame();

  __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
  __ push(r0);
  __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_JS);

  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already have overflowed
  // here, which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ push(r1);
  __ push(r0);
  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_JS);
  // End of stack check.

  // Push current limit and index.
  __ bind(&okay);
  __ push(r0);  // limit
  __ mov(r1, Operand(0, RelocInfo::NONE));  // initial index
  __ push(r1);

  // Change context eagerly to get the right global object if necessary.
  __ ldr(r0, MemOperand(fp, kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));

  // Compute the receiver.
  Label call_to_object, use_global_receiver, push_receiver;
  __ ldr(r0, MemOperand(fp, kRecvOffset));
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &call_to_object);
  __ LoadRoot(r1, Heap::kNullValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);
  __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, r1);
  __ b(eq, &use_global_receiver);

  // Check if the receiver is already a JavaScript object.
  // r0: receiver
  __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &call_to_object);
  __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
  __ b(le, &push_receiver);

  // Convert the receiver to a regular object.
  // r0: receiver
  __ bind(&call_to_object);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
  __ b(&push_receiver);

  // Use the current global receiver object as the receiver.
  __ bind(&use_global_receiver);
  const int kGlobalOffset =
      Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
  __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
  __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
  __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

  // Push the receiver.
  // r0: receiver
  __ bind(&push_receiver);
  __ push(r0);

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ b(&entry);

  // Load the current argument from the arguments array and push it to the
  // stack.
  // r0: current argument index
  __ bind(&loop);
  __ ldr(r1, MemOperand(fp, kArgsOffset));
  __ push(r1);
  __ push(r0);

  // Call the runtime to access the property in the arguments array.
  __ CallRuntime(Runtime::kGetProperty, 2);
  __ push(r0);

  // Advance the index (kept as a smi on the stack and in r0).
  __ ldr(r0, MemOperand(fp, kIndexOffset));
  __ add(r0, r0, Operand(1 << kSmiTagSize));
  __ str(r0, MemOperand(fp, kIndexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ ldr(r1, MemOperand(fp, kLimitOffset));
  __ cmp(r0, r1);
  __ b(ne, &loop);

  // Invoke the function.
  ParameterCount actual(r0);
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  __ ldr(r1, MemOperand(fp, kFunctionOffset));
  __ InvokeFunction(r1, actual, CALL_FUNCTION);

  // Tear down the internal frame and remove function, receiver and args.
  __ LeaveInternalFrame();
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
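  // The stm stores, from the new sp upwards: the smi-tagged argument count
  // (r0), the function (r1), the ARGUMENTS_ADAPTOR frame marker (r4), the
  // caller's fp and lr. fp is then set to point at the saved caller fp.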
  __ add(fp, sp, Operand(3 * kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then remove the parameters from the stack.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // The extra two words skip the saved fp and lr so that r0 points at the
    // receiver.
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // The 2 * kPointerSize offset skips the saved fp and lr between fp and the
    // caller's arguments.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.
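    // The end address leaves room below fp for the expected arguments plus
    // four more words: the receiver and the three adaptor frame slots (the
    // smi-tagged argument count, the function and the frame marker).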

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM
