// stub-cache-arm.cc revision 1a80c996a0cb6c5ac739148352552ab47038ccc3
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_ARM)
31
32#include "ic-inl.h"
33#include "codegen-inl.h"
34#include "stub-cache.h"
35
36namespace v8 {
37namespace internal {
38
39#define __ ACCESS_MASM(masm)
40
41
42static void ProbeTable(MacroAssembler* masm,
43                       Code::Flags flags,
44                       StubCache::Table table,
45                       Register name,
46                       Register offset,
47                       Register scratch,
48                       Register scratch2) {
49  ExternalReference key_offset(SCTableReference::keyReference(table));
50  ExternalReference value_offset(SCTableReference::valueReference(table));
51
52  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
53  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
54
55  // Check the relative positions of the address fields.
56  ASSERT(value_off_addr > key_off_addr);
57  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
58  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
59
60  Label miss;
61  Register offsets_base_addr = scratch;
62
63  // Check that the key in the entry matches the name.
64  __ mov(offsets_base_addr, Operand(key_offset));
65  __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
66  __ cmp(name, ip);
67  __ b(ne, &miss);
68
69  // Get the code entry from the cache.
70  __ add(offsets_base_addr, offsets_base_addr,
71         Operand(value_off_addr - key_off_addr));
72  __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));
73
74  // Check that the flags match what we're looking for.
75  __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
76  __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
77  __ cmp(scratch2, Operand(flags));
78  __ b(ne, &miss);
79
80  // Re-load code entry from cache.
81  __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));
82
83  // Jump to the first instruction in the code stub.
84  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
85  __ Jump(offset);
86
87  // Miss: fall through.
88  __ bind(&miss);
89}
90
91
// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
// Clobbers scratch0 and scratch1.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             String* name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsSymbol());
  __ IncrementCounter(&Counters::negative_lookups, 1, scratch0, scratch1);
  // The miss counter is incremented eagerly here and decremented again at
  // the end if the lookup succeeds in proving the property absent.
  __ IncrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  // Compute the capacity mask.
  const int kCapacityOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up.
  static const int kProbes = 4;
  const int kElementsStartOffset =
      StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the null value).
  for (int i = 0; i < kProbes; i++) {
    // scratch0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = scratch1;
    // Capacity is smi 2^n.
    __ ldr(index, FieldMemOperand(properties, kCapacityOffset));
    __ sub(index, index, Operand(1));
    // Hash and probe offset are folded into the operand as a smi so the
    // masking happens in smi-tagged form.
    __ and_(index, index, Operand(
        Smi::FromInt(name->Hash() + StringDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(index, index, Operand(index, LSL, 1));  // index *= 3.

    Register entity_name = scratch1;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    Register tmp = properties;
    // index is a smi (value << 1), so shifting left once more scales it by
    // the pointer size.
    __ add(tmp, properties, Operand(index, LSL, 1));
    __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset));

    ASSERT(!tmp.is(entity_name));
    __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
    __ cmp(entity_name, tmp);
    if (i != kProbes - 1) {
      __ b(eq, &done);

      // Stop if found the property.
      __ cmp(entity_name, Operand(Handle<String>(name)));
      __ b(eq, miss_label);

      // Check if the entry name is not a symbol.
      __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
      __ ldrb(entity_name,
              FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
      __ tst(entity_name, Operand(kIsSymbolMask));
      __ b(eq, miss_label);

      // Restore the properties.
      __ ldr(properties,
             FieldMemOperand(receiver, JSObject::kPropertiesOffset));
    } else {
      // Give up probing if still not found the undefined value.
      __ b(ne, miss_label);
    }
  }
  __ bind(&done);
  // The property was proven absent: undo the eager miss count.
  __ DecrementCounter(&Counters::negative_lookups_miss, 1, scratch0, scratch1);
}
203
204
// Probes the primary and then the secondary stub cache table for a code
// stub matching the given name, the receiver's map and the given code
// flags.  On a hit the stub is entered directly (via ProbeTable) and
// control does not return; on a miss execution falls through so the caller
// can handle it by entering the runtime system.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash.
  // Primary hash: (name hash field + receiver map) ^ flags, masked to the
  // primary table size.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: Compute hash for secondary probe.
  // Secondary hash: (primary hash - name + flags), masked to the secondary
  // table size.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
267
268
// Loads into |prototype| the prototype object of the global function stored
// at |index| in the current global context.  Only |prototype| is clobbered.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
285
286
// Loads into |prototype| the prototype of the global function at |index|,
// with the function's initial map embedded in the code at compile time.
// Jumps to |miss| if the code is executing in a global context other than
// the one it was compiled in, since the embedded map would then be stale.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ Move(ip, Top::global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
301
302
303// Load a fast property out of a holder object (src). In-object properties
304// are loaded directly otherwise the property is loaded from the properties
305// fixed array.
306void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
307                                            Register dst, Register src,
308                                            JSObject* holder, int index) {
309  // Adjust for the number of properties stored in the holder.
310  index -= holder->map()->inobject_properties();
311  if (index < 0) {
312    // Get the property straight out of the holder.
313    int offset = holder->map()->instance_size() + (index * kPointerSize);
314    __ ldr(dst, FieldMemOperand(src, offset));
315  } else {
316    // Calculate the offset into the properties array.
317    int offset = index * kPointerSize + FixedArray::kHeaderSize;
318    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
319    __ ldr(dst, FieldMemOperand(dst, offset));
320  }
321}
322
323
// Loads the length of a JS array into r0 and returns.  Jumps to
// |miss_label| if the receiver is a smi or not a JS array.  Clobbers
// |scratch|.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
340
341
// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
// Jumps to |smi| for smi receivers and to |non_string_object| for
// non-string heap objects; falls through when the receiver is a string.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
363
364
// Generate code to load the length from a string object and return the length
// in r0.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value and check if the wrapped value is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
  __ Ret();
}
395
396
// Loads the receiver's function prototype into r0 and returns.
// TryGetFunctionPrototype jumps to |miss_label| when the prototype cannot
// be fetched directly.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype leaves the prototype in scratch1.
  __ mov(r0, scratch1);
  __ Ret();
}
406
407
// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
// If |transition| is non-NULL the store also transitions the receiver to
// the new map (tail-calling the runtime first when the property backing
// store must grow).
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    // Arguments passed on the stack: receiver, transition map, value.
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ Push(r2, r0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)),
           3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
498
499
500void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
501  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
502  Code* code = NULL;
503  if (kind == Code::LOAD_IC) {
504    code = Builtins::builtin(Builtins::LoadIC_Miss);
505  } else {
506    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
507  }
508
509  Handle<Code> ic(code);
510  __ Jump(ic, RelocInfo::CODE_TARGET);
511}
512
513
// Checks that r1 holds a JS function and tail-calls it with the given
// arguments; jumps to |miss| otherwise.  When |object| is a global object
// the receiver slot on the stack is patched with the global proxy first.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ BranchOnSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
538
539
// Pushes the five arguments expected by the interceptor runtime calls:
// name, interceptor info, receiver, holder and the interceptor's data.
// |name| is clobbered (it is reused as a scratch register after being
// pushed).
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor info is embedded directly as a constant, so it must not
  // live in new space.
  ASSERT(!Heap::InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
556
557
// Calls the IC::kLoadPropertyWithInterceptorOnly runtime function, which
// queries the interceptor without performing a full property lookup.  The
// five interceptor arguments are pushed here; |name| is clobbered.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
  // CEntryStub expects the argument count in r0 and the function to call
  // in r1.
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
573
574
575// Reserves space for the extra arguments to FastHandleApiCall in the
576// caller's frame.
577//
578// These arguments are set by CheckPrototypes and GenerateFastApiCall.
579static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
580                                       Register scratch) {
581  __ mov(scratch, Operand(Smi::FromInt(0)));
582  __ push(scratch);
583  __ push(scratch);
584  __ push(scratch);
585  __ push(scratch);
586}
587
588
// Undoes the effects of ReserveSpaceForFastApiCall by dropping the four
// reserved stack slots.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(4);
}
593
594
// Generates call to FastHandleApiCall builtin (tail call; no return here).
// Expects the four stack slots reserved by ReserveSpaceForFastApiCall and
// fills three of them with the function, the call data and the callback.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc) {
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ mov(r5, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  // The callback and call data are embedded as constants only when they are
  // outside new space; otherwise they are loaded from the call handler info
  // object at runtime.
  bool info_loaded = false;
  Object* callback = optimization.api_call_info()->callback();
  if (Heap::InNewSpace(callback)) {
    info_loaded = true;
    __ Move(r0, Handle<CallHandlerInfo>(optimization.api_call_info()));
    __ ldr(r7, FieldMemOperand(r0, CallHandlerInfo::kCallbackOffset));
  } else {
    __ Move(r7, Handle<Object>(callback));
  }
  Object* call_data = optimization.api_call_info()->data();
  if (Heap::InNewSpace(call_data)) {
    if (!info_loaded) {
      // Load the call handler info only once.
      __ Move(r0, Handle<CallHandlerInfo>(optimization.api_call_info()));
    }
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, Handle<Object>(call_data));
  }

  // Store the function (r5), call data (r6) and callback (r7) into three of
  // the reserved stack slots, skipping the word at sp itself.
  __ add(sp, sp, Operand(1 * kPointerSize));
  __ stm(ia, sp, r5.bit() | r6.bit() | r7.bit());
  __ sub(sp, sp, Operand(1 * kPointerSize));

  // Set the number of arguments: the actual argc plus the four extra words
  // reserved for the fast api call.
  __ mov(r0, Operand(argc + 4));

  // Jump to the fast api call builtin (tail call).
  Handle<Code> code = Handle<Code>(
      Builtins::builtin(Builtins::FastHandleApiCall));
  ParameterCount expected(0);
  __ InvokeCode(code, expected, expected,
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);
}
638
639
640class CallInterceptorCompiler BASE_EMBEDDED {
641 public:
  // Captures the state shared by the Compile* helpers: the owning stub
  // compiler, the call's argument count and the register holding the
  // property name.
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}
648
  // Entry point: compiles the call stub for a property backed by a named
  // interceptor.  Dispatches to the cacheable fast path when the lookup
  // resolved to a constant function, and to the generic runtime path
  // otherwise.
  void Compile(MacroAssembler* masm,
               JSObject* object,
               JSObject* holder,
               String* name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ BranchOnSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      CompileCacheable(masm,
                       object,
                       receiver,
                       scratch1,
                       scratch2,
                       scratch3,
                       holder,
                       lookup,
                       name,
                       optimization,
                       miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
    }
  }
691
692 private:
  // Fast path for interceptors whose lookup resolved to a constant
  // function: checks the prototype chains, invokes the interceptor, and if
  // it produced no value calls the cached constant function directly --
  // through the fast api call builtin when the function is a simple API
  // call whose expected receiver type is on one of the checked chains.
  void CompileCacheable(MacroAssembler* masm,
                       JSObject* object,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       JSObject* interceptor_holder,
                       LookupResult* lookup,
                       String* name,
                       const CallOptimization& optimization,
                       Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // depth1/depth2 record on which prototype chain (receiver->interceptor
    // holder, or interceptor holder->function holder) the expected receiver
    // type was found, if any.
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
       !lookup->holder()->IsGlobalObject()) {
     depth1 =
         optimization.GetPrototypeDepthOfExpectedType(object,
                                                      interceptor_holder);
     if (depth1 == kInvalidProtoDepth) {
       depth2 =
           optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                        lookup->holder());
     }
     can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                            (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(&Counters::call_const_interceptor, 1,
                      scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    // When the fast api path reserved stack space, misses must first free
    // that space, so they go through miss_cleanup.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate());
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }
786
  // Compiles the slow (non-cacheable) interceptor call path: verifies the
  // prototype chain from |object| to |interceptor_holder|, then calls the
  // kLoadPropertyWithInterceptorForCall runtime function inside an internal
  // frame so the GC can see the pushed pointers. On map mismatch, jumps to
  // |miss_label|. The name_ register is preserved across the runtime call.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    // Emit map checks for every object on the chain; |holder| ends up
    // holding the interceptor's holder object.
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    // Pushes the 5 arguments the runtime entry expects (receiver, holder,
    // name, interceptor info, holder object).
    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
          ExternalReference(
              IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
          5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }
821
  // Invokes the named interceptor getter for |holder_obj| inside an internal
  // frame, preserving |holder| and name_ across the call. If the interceptor
  // produced a real value (i.e. r0 is not the no-interceptor-result
  // sentinel), control branches to |interceptor_succeeded| with the result
  // in r0; otherwise execution falls through so the caller can try the
  // cached constant function instead.
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    // Stack after this: [holder, name_] (name_ on top).
    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    // NOTE(review): the holder value saved above is popped into the
    // |receiver| register, clobbering the original receiver. This looks
    // intentional for the follow-up invoke path, but confirm callers expect
    // the holder in |receiver| from here on.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }
846
  StubCompiler* stub_compiler_;      // Owning compiler; used for map checks.
  const ParameterCount& arguments_;  // Call argument count descriptor.
  Register name_;                    // Register holding the property name.
};
851
852
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property. Returns the cell on success so the caller can depend on it,
// or a Failure (from the allocation) which the caller must propagate.
// The emitted code loads the cell's value and jumps to |miss| unless it
// is still the hole, clobbering |scratch| and ip.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  // Ensure a property cell exists now, at compile time, so the stub can
  // embed a direct reference to it. This may fail on allocation.
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  // A stub compiled against an empty cell is only valid while it is empty.
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(Handle<Object>(cell)));
  __ ldr(scratch,
         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
  return cell;
}
876
877
878#undef __
879#define __ ACCESS_MASM(masm())
880
881
// Walks the prototype chain from |object| to |holder|, emitting map checks
// (and negative dictionary lookups for slow-mode objects) so the stub jumps
// to |miss| if any object on the chain has changed since compile time.
// Returns the register holding the holder object on the fall-through path.
// If |save_at_depth| equals a chain depth, the object at that depth is also
// stored to sp[0]. May record a Failure via set_failure(); callers must
// check failure state after this returns.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: a map check is not enough because
      // dictionary contents can change without a map transition. Instead,
      // prove at compile time the property is absent and emit a negative
      // lookup that re-validates this at run time. The dictionary lookup
      // needs an interned (symbol) name.
      if (!name->IsSymbol()) {
        MaybeObject* lookup_result = Heap::LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          set_failure(Failure::cast(lookup_result));
          return reg;
        } else {
          name = String::cast(lookup_result->ToObjectUnchecked());
        }
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Get the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));

      // Branch on the result of the map check.
      __ b(ne, miss);

      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore scratch register to be the map of the object.  In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // from now the object is in holder_reg
      if (Heap::InNewSpace(prototype)) {
        // The prototype is in new space; we cannot store a reference
        // to it in the code. Load it from the map.
        __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, Operand(Handle<JSObject>(prototype)));
      }
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
  __ b(ne, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                    GlobalObject::cast(current),
                                                    name,
                                                    scratch1,
                                                    miss);
      if (cell->IsFailure()) {
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
1015
1016
// Emits a fast in-object/field property load: smi check on the receiver,
// prototype-chain map checks from |object| to |holder|, then a direct load
// of field |index| into r0 followed by a return. Jumps to |miss| on any
// check failure.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);
  // |reg| now holds the holder; load the field from it into r0 and return.
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}
1037
1038
1039void StubCompiler::GenerateLoadConstant(JSObject* object,
1040                                        JSObject* holder,
1041                                        Register receiver,
1042                                        Register scratch1,
1043                                        Register scratch2,
1044                                        Register scratch3,
1045                                        Object* value,
1046                                        String* name,
1047                                        Label* miss) {
1048  // Check that the receiver isn't a smi.
1049  __ tst(receiver, Operand(kSmiTagMask));
1050  __ b(eq, miss);
1051
1052  // Check that the maps haven't changed.
1053  Register reg =
1054      CheckPrototypes(object, receiver, holder,
1055                      scratch1, scratch2, scratch3, name, miss);
1056
1057  // Return the constant value.
1058  __ mov(r0, Operand(Handle<Object>(value)));
1059  __ Ret();
1060}
1061
1062
// Emits a callback-property load: smi check, prototype-chain map checks,
// then a tail call to the kLoadCallbackProperty runtime entry with five
// arguments (receiver, holder, callback data, callback info, name).
// Always returns true; the |failure| out-parameter is never written on
// this (ARM) path.
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Push the arguments on the JS stack of the caller.
  __ push(receiver);  // Receiver.
  __ mov(scratch3, Operand(Handle<AccessorInfo>(callback)));  // callback data
  __ ldr(ip, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  __ Push(reg, ip, scratch3, name_reg);

  // Do tail-call to the runtime system.
  ExternalReference load_callback_property =
      ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
  __ TailCallExternalReference(load_callback_property, 5, 1);

  return true;
}
1096
1097
// Emits a load through a named interceptor. When the post-interceptor
// lookup is a cacheable FIELD or AccessorInfo CALLBACKS property, the
// interceptor call and the follow-up load are inlined; otherwise the whole
// load is delegated to the kLoadPropertyWithInterceptorForLoad runtime
// entry via a tail call. The result is returned in r0; any failed map or
// smi check jumps to |miss|.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ Push(receiver, holder_reg, name_reg);
    } else {
      __ Push(holder_reg, name_reg);
    }

    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    // Interceptor produced a value; it is already in r0.
    __ LeaveInternalFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    // Restore the registers in reverse push order.
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), r0, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ Ret();
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ Move(scratch2, Handle<AccessorInfo>(callback));
      // holder_reg is either receiver or scratch1.
      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(scratch3, scratch2, name_reg);
      } else {
        __ push(receiver);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(holder_reg, scratch3, scratch2, name_reg);
      }

      // Both branches above push the same 5 arguments the runtime expects:
      // receiver, holder, callback data, callback info, name.
      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1239
1240
1241void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1242  if (kind_ == Code::KEYED_CALL_IC) {
1243    __ cmp(r2, Operand(Handle<String>(name)));
1244    __ b(ne, miss);
1245  }
1246}
1247
1248
// Loads the call receiver from the stack into r0 and emits the checks
// needed for a call on a global-object holder: an optional smi check (only
// when |object| differs from |holder|) and the full prototype-chain map
// checks from |object| to |holder|. Jumps to |miss| on failure.
void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}
1272
1273
// Loads the current value of |cell| into r1 and verifies it is still the
// expected |function|, jumping to |miss| otherwise. For functions in new
// space the pointer cannot be embedded in code, so the check compares the
// SharedFunctionInfo instead, which also lets multiple closures of the
// same function share this IC. Clobbers r1, r3, r4.
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
    __ b(ne, miss);
  } else {
    // Old-space function: compare the function pointer directly.
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, miss);
  }
}
1303
1304
1305MaybeObject* CallStubCompiler::GenerateMissBranch() {
1306  Object* obj;
1307  { MaybeObject* maybe_obj =
1308        StubCache::ComputeCallMiss(arguments().immediate(), kind_);
1309    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1310  }
1311  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1312  return obj;
1313}
1314
1315
// Compiles a call stub for a function stored in an in-object field: checks
// the name (keyed ICs), receiver smi-ness and prototype-chain maps, loads
// the field into r1, and dispatches through GenerateCallFunction. Returns
// the generated code object, or a Failure from the miss-branch allocation.
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  // Load the function to call from the holder's field into r1.
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(FIELD, name);
}
1352
1353
// Compiles a specialized stub for Array.prototype.push. Only the JSArray
// receiver, non-cell case is handled; otherwise Heap::undefined_value() is
// returned so the caller falls back to a regular call stub. argc == 0
// returns the length unchanged; argc == 1 pushes inline (with write
// barrier, and an inline new-space growth path when the elements backing
// store is full); all other cases tail-call the c_ArrayPush builtin.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  Register receiver = r1;

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object), receiver,
                  holder, r3, r0, r4, name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ Drop(argc + 1);
    __ Ret();
  } else {
    Label call_builtin;

    Register elements = r3;
    Register end_elements = r5;

    // Get the elements array of the object.
    __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ CheckMap(elements, r0,
                Heap::kFixedArrayMapRootIndex, &call_builtin, true);

    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into r0 and calculate new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      // Length and capacity are smis, so smi arithmetic works directly.
      __ add(r0, r0, Operand(Smi::FromInt(argc)));

      // Get the element's length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);

      // Save new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Push the element.
      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
      // Pre-indexed store: also leaves the slot address in end_elements
      // for the write barrier below.
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));

      // Check for a smi.
      __ BranchOnNotSmi(r4, &with_write_barrier);
      __ bind(&exit);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&with_write_barrier);
      // No barrier needed if the elements array is itself in new space.
      __ InNewSpace(elements, r4, eq, &exit);
      __ RecordWriteHelper(elements, end_elements, r4);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&attempt_to_grow_elements);
      // r0: array's length + 1.
      // r4: elements' length.

      if (!FLAG_inline_new) {
        // Inline allocation disabled: this branch is unconditional, so the
        // growth code emitted below is unreachable in that configuration.
        __ b(&call_builtin);
      }

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address();
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address();

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      __ add(end_elements, end_elements, Operand(kEndElementsOffset));
      __ mov(r7, Operand(new_space_allocation_top));
      __ ldr(r6, MemOperand(r7));
      // Growth in place only works if the elements array is the most
      // recent new-space allocation (its end equals allocation top).
      __ cmp(end_elements, r6);
      __ b(ne, &call_builtin);

      __ mov(r9, Operand(new_space_allocation_limit));
      __ ldr(r9, MemOperand(r9));
      __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
      __ cmp(r6, r9);
      __ b(hi, &call_builtin);

      // We fit and could grow elements.
      // Update new_space_allocation_top.
      __ str(r6, MemOperand(r7));
      // Push the argument.
      __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
      __ str(r6, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ str(r6, MemOperand(end_elements, i * kPointerSize));
      }

      // Update elements' and array's sizes.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
      __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so write barrier is not required.
      __ Drop(argc + 1);
      __ Ret();
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
                                 argc + 1,
                                 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1512
1513
// Compiles a specialized stub for Array.prototype.pop. Only the JSArray
// receiver, non-cell case is handled; otherwise Heap::undefined_value() is
// returned so the caller falls back to a regular call stub. The fast path
// pops the last element inline; empty arrays return undefined, and a
// hole-valued last element falls back to the c_ArrayPop builtin (which
// handles the prototype-chain lookup the hole implies).
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value();

  Label miss, return_undefined, call_builtin;

  Register receiver = r1;
  Register elements = r3;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object),
                  receiver, holder, elements, r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true);

  // Get the array's length into r4 and calculate new length.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  // Smi subtraction; SetCC makes the lt branch below catch length == 0.
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  // A hole means the element may live on the prototype chain; let the
  // builtin handle that case.
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1598
1599
// Compiles a specialized call stub for String.prototype.charCodeAt.
// Only generated when the receiver is statically known to be a string and
// the call does not go through a global property cell; otherwise returns
// undefined to make the caller fall back to the regular call stub.
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label index_out_of_range;
  GenerateNameCheck(name, &miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  // Load the receiver and, if present, the index argument; a missing index
  // is replaced by undefined, which the generator treats as index 0.
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   &index_out_of_range,
                                                   STRING_INDEX_IS_NUMBER);
  // Fast path: result char code is produced in r0; pop args and return.
  char_code_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow path emitted out of line; reached from the fast code above.
  ICRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  // Per spec, charCodeAt with an out-of-range index returns NaN.
  __ bind(&index_out_of_range);
  __ LoadRoot(r0, Heap::kNanValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  // Any type-check failure falls through to the generic call miss handler.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1672
1673
// Compiles a specialized call stub for String.prototype.charAt.
// Mirrors CompileStringCharCodeAtCall but produces a one-character string
// (via StringCharAtGenerator) and returns the empty string, not NaN, when
// the index is out of range.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return Heap::undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label index_out_of_range;

  GenerateNameCheck(name, &miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch1 = r1;
  Register scratch2 = r3;
  Register result = r0;
  // Load the receiver and, if present, the index argument; a missing index
  // is replaced by undefined, which the generator treats as index 0.
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &index_out_of_range,
                                          STRING_INDEX_IS_NUMBER);
  // Fast path: the single-character string is produced in r0.
  char_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow path emitted out of line; reached from the fast code above.
  ICRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  // Per spec, charAt with an out-of-range index returns the empty string.
  __ bind(&index_out_of_range);
  __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  // Any type-check failure falls through to the generic call miss handler.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1749
1750
// Compiles a specialized call stub for String.fromCharCode with exactly one
// argument. Supports both the direct-receiver form (cell == NULL) and the
// global-property form (cell != NULL, function loaded from the cell).
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Direct call: load the receiver and verify its map chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Global call: check the receiver and load the function from the cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(code, Operand(kSmiTagMask));
  __ b(ne, &slow);

  // Convert the smi code to uint16.
  // Masking with the tagged 0xffff keeps the smi tag intact.
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));

  // Fast path: build the one-character string in r0.
  StringCharFromCodeGenerator char_from_code_generator(code, r0);
  char_from_code_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  // Slow path emitted out of line; reached from the fast code above.
  ICRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  // NORMAL is used for the global-cell form, matching other custom calls.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1825
1826
1827MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
1828                                                    JSObject* holder,
1829                                                    JSGlobalPropertyCell* cell,
1830                                                    JSFunction* function,
1831                                                    String* name) {
1832  // ----------- S t a t e -------------
1833  //  -- r2                     : function name
1834  //  -- lr                     : return address
1835  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1836  //  -- ...
1837  //  -- sp[argc * 4]           : receiver
1838  // -----------------------------------
1839
1840  if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value();
1841  CpuFeatures::Scope scope_vfp3(VFP3);
1842
1843  const int argc = arguments().immediate();
1844
1845  // If the object is not a JSObject or we got an unexpected number of
1846  // arguments, bail out to the regular call.
1847  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();
1848
1849  Label miss, slow;
1850  GenerateNameCheck(name, &miss);
1851
1852  if (cell == NULL) {
1853    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
1854
1855    STATIC_ASSERT(kSmiTag == 0);
1856    __ BranchOnSmi(r1, &miss);
1857
1858    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
1859                    &miss);
1860  } else {
1861    ASSERT(cell->value() == function);
1862    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1863    GenerateLoadFunctionFromCell(cell, function, &miss);
1864  }
1865
1866  // Load the (only) argument into r0.
1867  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
1868
1869  // If the argument is a smi, just return.
1870  STATIC_ASSERT(kSmiTag == 0);
1871  __ tst(r0, Operand(kSmiTagMask));
1872  __ Drop(argc + 1, eq);
1873  __ Ret(eq);
1874
1875  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
1876
1877  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;
1878
1879  // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
1880  // minus infinity) mode.
1881
1882  // Load the HeapNumber value.
1883  // We will need access to the value in the core registers, so we load it
1884  // with ldrd and move it to the fpu. It also spares a sub instruction for
1885  // updating the HeapNumber value address, as vldr expects a multiple
1886  // of 4 offset.
1887  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
1888  __ vmov(d1, r4, r5);
1889
1890  // Backup FPSCR.
1891  __ vmrs(r3);
1892  // Set custom FPCSR:
1893  //  - Set rounding mode to "Round towards Minus Infinity"
1894  //    (ie bits [23:22] = 0b10).
1895  //  - Clear vfp cumulative exception flags (bits [3:0]).
1896  //  - Make sure Flush-to-zero mode control bit is unset (bit 22).
1897  __ bic(r9, r3,
1898      Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
1899  __ orr(r9, r9, Operand(kVFPRoundToMinusInfinityBits));
1900  __ vmsr(r9);
1901
1902  // Convert the argument to an integer.
1903  __ vcvt_s32_f64(s0, d1, Assembler::FPSCRRounding, al);
1904
1905  // Use vcvt latency to start checking for special cases.
1906  // Get the argument exponent and clear the sign bit.
1907  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
1908  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));
1909
1910  // Retrieve FPSCR and check for vfp exceptions.
1911  __ vmrs(r9);
1912  __ tst(r9, Operand(kVFPExceptionMask));
1913  __ b(&no_vfp_exception, eq);
1914
1915  // Check for NaN, Infinity, and -Infinity.
1916  // They are invariant through a Math.Floor call, so just
1917  // return the original argument.
1918  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
1919        >> HeapNumber::kMantissaBitsInTopWord), SetCC);
1920  __ b(&restore_fpscr_and_return, eq);
1921  // We had an overflow or underflow in the conversion. Check if we
1922  // have a big exponent.
1923  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
1924  // If greater or equal, the argument is already round and in r0.
1925  __ b(&restore_fpscr_and_return, ge);
1926  __ b(&wont_fit_smi);
1927
1928  __ bind(&no_vfp_exception);
1929  // Move the result back to general purpose register r0.
1930  __ vmov(r0, s0);
1931  // Check if the result fits into a smi.
1932  __ add(r1, r0, Operand(0x40000000), SetCC);
1933  __ b(&wont_fit_smi, mi);
1934  // Tag the result.
1935  STATIC_ASSERT(kSmiTag == 0);
1936  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1937
1938  // Check for -0.
1939  __ cmp(r0, Operand(0));
1940  __ b(&restore_fpscr_and_return, ne);
1941  // r5 already holds the HeapNumber exponent.
1942  __ tst(r5, Operand(HeapNumber::kSignMask));
1943  // If our HeapNumber is negative it was -0, so load its address and return.
1944  // Else r0 is loaded with 0, so we can also just return.
1945  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);
1946
1947  __ bind(&restore_fpscr_and_return);
1948  // Restore FPSCR and return.
1949  __ vmsr(r3);
1950  __ Drop(argc + 1);
1951  __ Ret();
1952
1953  __ bind(&wont_fit_smi);
1954  // Restore FPCSR and fall to slow case.
1955  __ vmsr(r3);
1956
1957  __ bind(&slow);
1958  // Tail call the full function. We do not have to patch the receiver
1959  // because the function makes no use of it.
1960  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1961
1962  __ bind(&miss);
1963  // r2: function name.
1964  MaybeObject* obj = GenerateMissBranch();
1965  if (obj->IsFailure()) return obj;
1966
1967  // Return the generated code.
1968  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1969}
1970
1971
// Compiles a specialized call stub for Math.abs with exactly one argument.
// Smis are handled with a branchless negate trick; heap numbers get their
// sign bit cleared (allocating a fresh HeapNumber when negative).
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Direct call: load the receiver and verify its map chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Global call: check the receiver and load the function from the cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ BranchOnNotSmi(r0, &not_smi);

  // Branchless abs: r0 ASR 31 is all ones for a negative smi and zero
  // otherwise, so eor + sub computes (x ^ mask) - mask == |x|.
  // Do bitwise not or do nothing depending on the sign of the
  // argument.
  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));

  // Add 1 or do nothing depending on the sign of the argument.
  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ b(mi, &slow);

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ tst(r1, Operand(HeapNumber::kSignMask));
  __ b(ne, &negative_sign);
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2074
2075
// Compiles a monomorphic call stub for a constant function. Dispatches on
// the receiver check kind (map, string, number, boolean); may emit a fast
// API call when the function is a simple API call with a known prototype
// depth. Custom call generators are tried first and may take over entirely.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasCustomCallGenerator()) {
    const int id = function_info->custom_call_generator_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) {
      return result;
    }
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  // (Skipped for NUMBER_CHECK, where a smi receiver is valid.)
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  // depth stays kInvalidProtoDepth unless the fast API call path is taken.
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1, r0, r3);

      // A simple API call on a non-global receiver may qualify for the
      // fast API call path if the prototype depth is known.
      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1, r0, r3);
        ReserveSpaceForFastApiCall(masm(), r0);
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                      depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  // Either tail-call through the fast API stub or invoke the function.
  if (depth != kInvalidProtoDepth) {
    GenerateFastApiCall(masm(), optimization, argc);
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  // If stack space was reserved for a fast API call, release it first.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    FreeSpaceForFastApiCall(masm());
  }

  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
2232
2233
// Compiles a call stub for a property reached through an interceptor.
// The interceptor compiler produces the callee in r0; it is moved to r1
// and dispatched through the generic call-function path.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Look the property up past the interceptor so the compiler can
  // specialize for a post-interceptor hit.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), r2);
  compiler.Compile(masm(),
                   object,
                   holder,
                   name,
                   &lookup,
                   r1,
                   r3,
                   r4,
                   r0,
                   &miss);

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2284
2285
// Compiles a call stub for a function stored in a global property cell.
// Custom call generators are tried first; otherwise the cell is checked to
// still hold the expected function and the cached code is tail-called.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasCustomCallGenerator()) {
    const int id = function_info->custom_call_generator_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  // NOTE(review): assumes GenerateGlobalReceiverCheck left the receiver
  // in r0 — confirm against its definition earlier in this file.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1, r3, r4);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2347
2348
// Compiles a store stub writing to an in-object/backing-store field at
// the given index, optionally performing a map transition. On any check
// failure, jumps to the generic StoreIC miss builtin.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  // MAP_TRANSITION when the store changes the receiver's map.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2374
2375
// Compiles a store stub for a property backed by an AccessorInfo callback.
// After the map (and, for global proxies, security) checks pass, the store
// is delegated to the kStoreCallbackProperty runtime function.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Runtime arguments: receiver, callback info, name, value.
  __ push(r1);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ Push(ip, r2, r0);

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2422
2423
// Compiles a store stub for a property handled by an interceptor.
// After the map (and, for global proxies, security) checks pass, the store
// is delegated to the kStoreInterceptorProperty runtime function.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(r1, r2, r0);  // Receiver, name, value.

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2467
2468
// Compiles a store IC stub that writes directly into a global property
// cell.  The stub only guards on the global object's map (no smi check is
// needed here: a receiver map check precedes it, and the receiver for a
// global store is the global object itself); on a map mismatch it falls
// through to the generic StoreIC miss builtin.
// NOTE(review): the store into the cell is done without a write barrier —
// presumably global property cells are handled specially by the GC in this
// revision; confirm against the heap implementation.
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Store the value in the cell.
  __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));

  __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2501
2502
// Compiles a load IC stub for a property known NOT to exist anywhere on
// the receiver's prototype chain.  The stub verifies that the chain's maps
// are unchanged (and, for a global last object, that no property cell for
// the name has acquired a value) and then returns undefined.  May return a
// Failure if allocating the property-cell check code fails.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check the maps of the full prototype chain.
  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  r1,
                                                  &miss);
    if (cell->IsFailure()) {
      // Abandon the half-built stub: unuse the label so the assembler
      // does not complain about a bound-but-unresolved label.
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.  Nonexistent stubs are keyed on the empty
  // string rather than the property name.
  return GetCode(NONEXISTENT, Heap::empty_string());
}
2544
2545
// Compiles a load IC stub for an in-object/field property at the given
// descriptor index on the holder.  All checking and the field load itself
// are delegated to GenerateLoadField; a failed check jumps to the generic
// LOAD_IC miss handler.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // r3, r1, r4 are scratch registers.
  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2564
2565
// Compiles a load IC stub for a property backed by an AccessorInfo
// callback.  GenerateLoadCallback may itself fail (e.g. while allocating);
// in that case it reports the failure through |failure| and this function
// abandons the stub and propagates the failure.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
                                      callback, name, &miss, &failure);
  if (!success) {
    // Unuse the label so the assembler does not assert on an unresolved
    // label when the half-built code is discarded.
    miss.Unuse();
    return failure;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2591
2592
// Compiles a load IC stub for a constant-function property: after the
// prototype-chain checks the stub simply materializes the known constant
// |value| as the result.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // r3, r1, r4 are scratch registers.
  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2611
2612
// Compiles a load IC stub for a property with a load interceptor.  The
// post-interceptor lookup result lets the generated code fall back to a
// real property behind the interceptor when the interceptor does not
// provide a value.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Determine what the property looks like ignoring the interceptor.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object,
                          holder,
                          &lookup,
                          r0,   // receiver
                          r2,   // name
                          r3,   // scratch
                          r1,   // scratch
                          r4,   // scratch
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2641
2642
// Compiles a load IC stub that reads a global property straight out of its
// JSGlobalPropertyCell.  |is_dont_delete| properties can never be deleted,
// so the hole check on the cell value is skipped for them.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the map of the global has not changed.
  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  // A deleted property is represented by the hole value in the cell.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r4, ip);
    __ b(eq, &miss);
  }

  __ mov(r0, r4);  // The cell value is the result.
  __ IncrementCounter(&Counters::named_load_global_stub, 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_stub_miss, 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2688
2689
// Compiles a keyed-load stub specialized for one particular string key
// |name| mapping to a field property.  If the incoming key is not the
// cached name the stub misses to the generic KEYED_LOAD_IC handler.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3, r4 are scratch registers.
  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(FIELD, name);
}
2711
2712
// Compiles a keyed-load stub for a cached string key whose property is
// backed by an AccessorInfo callback.  Propagates any code-generation
// failure from GenerateLoadCallback.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3, r4,
                                      callback, name, &miss, &failure);
  if (!success) {
    // Abandon the half-built stub; unuse the label to keep the assembler
    // from asserting on an unresolved label.
    miss.Unuse();
    return failure;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
2742
2743
// Compiles a keyed-load stub for a cached string key whose value is a
// constant function; the constant is materialized directly after the
// key and prototype-chain checks.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3, r4 are scratch registers.
  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2766
2767
// Compiles a keyed-load stub for a cached string key on a receiver with a
// load interceptor.  The post-interceptor lookup lets the generated code
// fall back to a real property behind the interceptor.
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Determine what the property looks like ignoring the interceptor.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          r1,   // receiver
                          r0,   // key/name
                          r2,   // scratch
                          r3,   // scratch
                          r4,   // scratch
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(INTERCEPTOR, name);
}
2799
2800
// Compiles a keyed-load stub for the "length" key of a JSArray.  The key
// check pins the stub to the cached name; the actual length load and the
// receiver-is-array check live in GenerateLoadArrayLength.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2 is a scratch register.
  GenerateLoadArrayLength(masm(), r1, r2, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
2819
2820
// Compiles a keyed-load stub for the "length" key of a string receiver.
// The counter is incremented optimistically on entry and decremented again
// on the miss path so it only counts successful fast loads.
MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;
  __ IncrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);

  // Check the key is the cached one.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3 are scratch registers.
  GenerateLoadStringLength(masm(), r1, r2, r3, &miss);
  __ bind(&miss);
  // Undo the optimistic counter bump taken on entry.
  __ DecrementCounter(&Counters::keyed_load_string_length, 1, r2, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
2842
2843
// Compiles a keyed-load stub for the "prototype" key of a function
// receiver.  As with the string-length stub, the counter is incremented on
// entry and decremented on the miss path.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);

  // Check the name hasn't changed.
  __ cmp(r0, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r2, r3 are scratch registers.
  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
  __ bind(&miss);
  // Undo the optimistic counter bump taken on entry.
  __ DecrementCounter(&Counters::keyed_load_function_prototype, 1, r2, r3);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
2865
2866
// Compiles a keyed-store stub specialized for the cached string key |name|
// storing into a field.  A non-NULL |transition| means the store also
// switches the object to a new map (adding the property); the returned
// code is tagged FIELD or MAP_TRANSITION accordingly.
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1, r3, r4);

  // Check that the name has not changed.
  __ cmp(r1, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // r3 is used as scratch register. r1 and r2 keep their values if a jump to
  // the miss label is generated.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r2, r1, r3,
                     &miss);
  __ bind(&miss);

  // Undo the optimistic counter bump taken on entry.
  __ DecrementCounter(&Counters::keyed_store_field, 1, r3, r4);
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));

  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2903
2904
// Compiles a specialized construct stub for |function|: allocates the
// JSObject inline in new space and initializes its in-object properties
// directly from the "this.x = ..." assignments recorded on the shared
// function info, instead of calling the function's code.  Falls back to
// the generic JSConstructStubGeneric builtin whenever the fast path does
// not apply (debugger attached, missing/invalid initial map, allocation
// failure).
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- r1    : constructor
  //  -- lr    : return address
  //  -- [sp]  : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(r2, r7);
  __ b(ne, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.  A smi or a
  // non-map value here means the function has no usable initial map yet.
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
  __ Check(ne, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ AllocateInNewSpace(r3,
                        r4,
                        r5,
                        r6,
                        &generic_stub_call,
                        SIZE_IN_WORDS);

  // Allocated the JSObject, now initialize the fields. Map is set to initial
  // map and properties and elements are set to empty fixed array.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r7: undefined
  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ mov(r5, r4);
  // The post-indexed stores below walk r5 through the object header; the
  // ASSERT_EQs pin the expected field layout.
  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

  // Calculate the location of the first argument. The stack contains only the
  // argc arguments.
  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Fill all the in-object properties with undefined.
  // r0: argc
  // r1: first argument
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r5: First in-object property of JSObject (not tagged)
  // r7: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  SharedFunctionInfo* shared = function->shared();
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      // If argc <= arg_number the argument was not supplied by the caller.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      // Argument passed - find it on the stack.  r1 points one past the
      // last argument, so argument arg_number is at a negative offset.
      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      __ b(&next);
      __ bind(&not_passed);
      // Set the property to undefined.
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&next);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
  }

  // r0: argc
  // r4: JSObject (not tagged)
  // Move argc to r1 and the JSObject to return to r0 and tag it.
  __ mov(r1, r0);
  __ mov(r0, r4);
  __ orr(r0, r0, Operand(kHeapObjectTag));

  // r0: JSObject
  // r1: argc
  // Remove caller arguments and receiver from the stack and return.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3047
3048
3049#undef __
3050
3051} }  // namespace v8::internal
3052
3053#endif  // V8_TARGET_ARCH_ARM
3054