stub-cache-arm.cc revision 402d937239b0e2fd11bf2f4fe972ad78aa9fd481
1// Copyright 2006-2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "ic-inl.h"
31#include "codegen-inl.h"
32#include "stub-cache.h"
33
34namespace v8 {
35namespace internal {
36
37#define __ ACCESS_MASM(masm)
38
39
// Probe one table (primary or secondary) of the stub cache for a code
// object whose key matches |name| and whose flags match |flags|.
// |offset| holds the precomputed entry offset (masked and pre-scaled in
// GenerateProbe).  On a hit this jumps into the cached code and does not
// return; on a miss it falls through with |offset| restored.
static void ProbeTable(MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset) {
  // Addresses of the key and value arrays of the selected table.
  ExternalReference key_offset(SCTableReference::keyReference(table));
  ExternalReference value_offset(SCTableReference::valueReference(table));

  Label miss;

  // Save the offset on the stack; the flags check below clobbers it.
  __ push(offset);

  // Check that the key in the entry matches the name.  LSL 1 rescales
  // |offset| to the 8-byte entry size (see ASSERT(sizeof(Entry) == 8)
  // in GenerateProbe).
  __ mov(ip, Operand(key_offset));
  __ ldr(ip, MemOperand(ip, offset, LSL, 1));
  __ cmp(name, Operand(ip));
  __ b(ne, &miss);

  // Get the code entry from the cache (reusing |offset| as scratch).
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(offset, FieldMemOperand(offset, Code::kFlagsOffset));
  __ and_(offset, offset, Operand(~Code::kFlagsNotUsedInLookup));
  __ cmp(offset, Operand(flags));
  __ b(ne, &miss);

  // Restore offset and re-load code entry from cache.
  __ pop(offset);
  __ mov(ip, Operand(value_offset));
  __ ldr(offset, MemOperand(ip, offset, LSL, 1));

  // Jump to the first instruction in the code stub: skip the Code
  // object header and remove the heap-object tag.
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: Restore offset and fall through.
  __ bind(&miss);
  __ pop(offset);
}
82
83
// Probe the stub cache (primary then secondary table) for code matching
// |flags| and the receiver/name pair.  On a hit, jumps to the cached
// code and does not return; on a miss, falls through so the caller can
// enter the runtime.  |scratch| is clobbered; |extra| is not used here.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra) {
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags does not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Get the map of the receiver and compute the hash:
  // (name.hash_field + receiver.map) ^ flags, masked to the primary
  // table size (the mask is pre-shifted by kHeapObjectTagSize so the
  // offset stays scaled; ProbeTable relies on this).
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(masm, flags, kPrimary, name, scratch);

  // Primary miss: Compute hash for secondary probe:
  // (primary_hash - name + flags), masked to the secondary table size.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(masm, flags, kSecondary, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
133
134
// Load into |prototype| the initial-map prototype of the global function
// stored at |index| in the global context.  Uses |prototype| as the only
// scratch register; the context register cp must be valid.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}
151
152
153// Load a fast property out of a holder object (src). In-object properties
154// are loaded directly otherwise the property is loaded from the properties
155// fixed array.
156void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
157                                            Register dst, Register src,
158                                            JSObject* holder, int index) {
159  // Adjust for the number of properties stored in the holder.
160  index -= holder->map()->inobject_properties();
161  if (index < 0) {
162    // Get the property straight out of the holder.
163    int offset = holder->map()->instance_size() + (index * kPointerSize);
164    __ ldr(dst, FieldMemOperand(src, offset));
165  } else {
166    // Calculate the offset into the properties array.
167    int offset = index * kPointerSize + FixedArray::kHeaderSize;
168    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
169    __ ldr(dst, FieldMemOperand(dst, offset));
170  }
171}
172
173
// Generate code that loads the length of a JS array receiver into r0
// and returns.  Branches to |miss_label| if the receiver is a smi or
// not a JS array.  |scratch| is clobbered.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}
190
191
192// Generate code to check if an object is a string.  If the object is a
193// heap object, its map's instance type is left in the scratch1 register.
194// If this is not needed, scratch1 and scratch2 may be the same register.
// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
// Branches to |smi| for smi receivers and to |non_string_object| for
// non-string heap objects; falls through for strings.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, smi);

  // Check that the object is a string by testing the string-ness bit
  // of the instance type.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}
213
214
215// Generate code to load the length from a string object and return the length.
216// If the receiver object is not a string or a wrapped string object the
217// execution continues at the miss label. The register containing the
218// receiver is potentially clobbered.
// Generate code to load the length from a string object and return the length.
// The result is returned smi-tagged in r0.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string and smi-tag it.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ Ret();

  // Check if the object is a JSValue wrapper (instance type left in
  // scratch1 by the string check above).
  __ bind(&check_wrapper);
  __ cmp(scratch1, Operand(JS_VALUE_TYPE));
  __ b(ne, miss);

  // Unwrap the value and check if the wrapped value is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ Ret();
}
247
248
// Generate code that returns (in r0) the prototype of a function
// receiver.  Branches to |miss_label| if the receiver has no usable
// prototype (delegated to TryGetFunctionPrototype).  Both scratch
// registers are clobbered.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}
258
259
260// Generate StoreField code, value is passed in r0 register.
261// When leaving generated code after success, the receiver_reg and name_reg
262// may be clobbered.  Upon branch to miss_label, the receiver and name
263// registers have their original values.
// Generate StoreField code, value is passed in r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
// If |transition| is non-NULL the receiver's map is switched to it; when
// the transition would also need more property storage, the store is
// delegated to the ExtendStorage runtime function instead.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ tst(receiver_reg, Operand(kSmiTagMask));
  __ b(eq, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    // Arguments pushed: receiver, transition map, value (3 in total).
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ stm(db_w, sp, r2.bit() | r0.bit());
    __ TailCallRuntime(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)),
           3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(receiver_reg, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ mov(name_reg, Operand(offset));
    __ RecordWrite(scratch, name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}
352
353
354void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
355  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
356  Code* code = NULL;
357  if (kind == Code::LOAD_IC) {
358    code = Builtins::builtin(Builtins::LoadIC_Miss);
359  } else {
360    code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
361  }
362
363  Handle<Code> ic(code);
364  __ Jump(ic, RelocInfo::CODE_TARGET);
365}
366
367
// Generate a tail-call into the function in r1 after verifying it really
// is a JSFunction (branching to |miss| otherwise).  For global objects
// the on-stack receiver is first replaced with the global proxy.
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ BranchOnSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
392
393
// Push the five arguments expected by the interceptor runtime calls:
// receiver, holder, name, interceptor info, and the interceptor's data.
// NOTE: clobbers the |receiver| register (reused as scratch after its
// value has been pushed).
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(receiver);
  __ push(holder);
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor must not move (it is embedded as an immediate), so
  // it may not live in new space.
  ASSERT(!Heap::InNewSpace(interceptor));

  Register scratch = receiver;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
411
412
// Call the LoadPropertyWithInterceptorOnly IC utility through the
// C-entry stub, with the five interceptor arguments on the stack.
// The result comes back in r0.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
  // CEntryStub calling convention: r0 = argument count, r1 = C function.
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}
428
429
// Helper that compiles the load path through a named interceptor.
// CompileCacheable inlines the common post-interceptor lookups (FIELD
// and CALLBACKS); CompileRegular falls back to the runtime.
class LoadInterceptorCompiler BASE_EMBEDDED {
 public:
  explicit LoadInterceptorCompiler(Register name) : name_(name) {}

  // Try the interceptor first; if it yields the no-interceptor-result
  // sentinel, fall through to an inlined FIELD load or a CALLBACKS
  // runtime tail-call based on |lookup|.
  void CompileCacheable(MacroAssembler* masm,
                        StubCompiler* stub_compiler,
                        Register receiver,
                        Register holder,
                        Register scratch1,
                        Register scratch2,
                        JSObject* holder_obj,
                        LookupResult* lookup,
                        String* name,
                        Label* miss_label) {
    AccessorInfo* callback = NULL;
    bool optimize = false;
    // So far the most popular follow ups for interceptor loads are FIELD
    // and CALLBACKS, so inline only them, other cases may be added
    // later.
    if (lookup->type() == FIELD) {
      optimize = true;
    } else if (lookup->type() == CALLBACKS) {
      Object* callback_object = lookup->GetCallbackObject();
      if (callback_object->IsAccessorInfo()) {
        callback = AccessorInfo::cast(callback_object);
        optimize = callback->getter() != NULL;
      }
    }

    if (!optimize) {
      CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
      return;
    }

    // Note: starting a frame here makes GC aware of pointers pushed below.
    __ EnterInternalFrame();

    // Save receiver, holder and name so they can be restored if the
    // interceptor does not produce a result.
    __ push(receiver);
    __ push(holder);
    __ push(name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    Label interceptor_failed;
    // Compare with no_interceptor_result_sentinel.
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    // Interceptor produced a value: return it (already in r0).
    __ LeaveInternalFrame();
    __ Ret();

    // Interceptor had no result: restore the saved registers and
    // perform the regular property lookup inline.
    __ bind(&interceptor_failed);
    __ pop(name_);
    __ pop(holder);
    __ pop(receiver);

    __ LeaveInternalFrame();

    if (lookup->type() == FIELD) {
      holder = stub_compiler->CheckPrototypes(holder_obj,
                                              holder,
                                              lookup->holder(),
                                              scratch1,
                                              scratch2,
                                              name,
                                              miss_label);
      stub_compiler->GenerateFastPropertyLoad(masm,
                                              r0,
                                              holder,
                                              lookup->holder(),
                                              lookup->GetFieldIndex());
      __ Ret();
    } else {
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Insert the receiver below the return address on the stack
      // before tail-calling the callback runtime function.
      Label cleanup;
      __ pop(scratch2);
      __ push(receiver);
      __ push(scratch2);

      holder = stub_compiler->CheckPrototypes(holder_obj, holder,
                                              lookup->holder(), scratch1,
                                              scratch2,
                                              name,
                                              &cleanup);

      // Push the remaining callback arguments: holder, AccessorInfo,
      // callback data and name (5 with the receiver pushed above).
      __ push(holder);
      __ Move(holder, Handle<AccessorInfo>(callback));
      __ push(holder);
      __ ldr(scratch1, FieldMemOperand(holder, AccessorInfo::kDataOffset));
      __ push(scratch1);
      __ push(name_);

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallRuntime(ref, 5, 1);

      // Prototype check failed: remove the receiver that was inserted
      // below the return address, then fall through (stack now as on
      // entry).
      __ bind(&cleanup);
      __ pop(scratch1);
      __ pop(scratch2);
      __ push(scratch1);
    }
  }


  // Unoptimized path: push the interceptor arguments and tail-call the
  // full interceptor load runtime function.
  void CompileRegular(MacroAssembler* masm,
                      Register receiver,
                      Register holder,
                      Register scratch,
                      JSObject* holder_obj,
                      Label* miss_label) {
    PushInterceptorArguments(masm, receiver, holder, name_, holder_obj);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallRuntime(ref, 5, 1);
  }

 private:
  // Register holding the property name throughout compilation.
  Register name_;
};
558
559
// Compile a property load that goes through a named interceptor on
// |holder|.  Performs the smi and prototype-chain checks, then
// dispatches to the cacheable (inlined follow-up) or regular (runtime)
// path depending on the lookup result.
static void CompileLoadInterceptor(LoadInterceptorCompiler* compiler,
                                   StubCompiler* stub_compiler,
                                   MacroAssembler* masm,
                                   JSObject* object,
                                   JSObject* holder,
                                   String* name,
                                   LookupResult* lookup,
                                   Register receiver,
                                   Register scratch1,
                                   Register scratch2,
                                   Label* miss) {
  ASSERT(holder->HasNamedInterceptor());
  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      stub_compiler->CheckPrototypes(object, receiver, holder,
                                     scratch1, scratch2, name, miss);

  if (lookup->IsProperty() && lookup->IsCacheable()) {
    compiler->CompileCacheable(masm,
                               stub_compiler,
                               receiver,
                               reg,
                               scratch1,
                               scratch2,
                               holder,
                               lookup,
                               name,
                               miss);
  } else {
    compiler->CompileRegular(masm,
                             receiver,
                             reg,
                             scratch2,
                             holder,
                             miss);
  }
}
602
603
604#undef __
605#define __ ACCESS_MASM(masm())
606
607
// Generate map checks along the prototype chain from |object| to
// |holder| and return the register that ends up holding the holder.
// For any global object on the chain, additionally verify that no
// property named |name| has appeared by checking its property cell
// still holds the hole.  May record a Failure via set_failure().
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // TODO(602): support object saving.
  ASSERT(save_at_depth == kInvalidProtoDepth);

  // Check that the maps haven't changed.
  Register result =
      masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.
  while (object != holder) {
    if (object->IsGlobalObject()) {
      GlobalObject* global = GlobalObject::cast(object);
      Object* probe = global->EnsurePropertyCell(name);
      if (probe->IsFailure()) {
        // Allocation of the property cell failed; propagate the
        // failure to the caller through the stub compiler.
        set_failure(Failure::cast(probe));
        return result;
      }
      JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
      ASSERT(cell->value()->IsTheHole());
      // Miss if the cell no longer holds the hole, i.e. the property
      // has been introduced on the global object since compilation.
      __ mov(scratch, Operand(Handle<Object>(cell)));
      __ ldr(scratch,
             FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
      __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
      __ cmp(scratch, ip);
      __ b(ne, miss);
    }
    object = JSObject::cast(object->GetPrototype());
  }

  // Return the register containing the holder.
  return result;
}
648
649
// Generate a fast-property field load: check the receiver and its
// prototype chain, load the field at |index| of |holder| into r0 and
// return.  Branches to |miss| on any check failure.
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}
668
669
// Generate a constant-function load: check the receiver and its
// prototype chain, then return the compile-time-known |value| in r0.
// Branches to |miss| on any check failure.
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Return the constant value.
  __ mov(r0, Operand(Handle<Object>(value)));
  __ Ret();
}
690
691
// Generate a callback (AccessorInfo getter) load: check the receiver
// and prototype chain, push the five runtime arguments (receiver,
// holder, callback info, callback data, name) and tail-call the
// LoadCallbackProperty runtime function.  Always returns true; the
// |failure| out-parameter is unused on this path.
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);

  // Push the arguments on the JS stack of the caller.
  __ push(receiver);  // receiver
  __ push(reg);  // holder
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback data
  __ push(ip);
  __ ldr(reg, FieldMemOperand(ip, AccessorInfo::kDataOffset));
  __ push(reg);
  __ push(name_reg);  // name

  // Do tail-call to the runtime system.
  ExternalReference load_callback_property =
      ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
  __ TailCallRuntime(load_callback_property, 5, 1);

  return true;
}
726
727
// Generate an interceptor load by delegating to CompileLoadInterceptor
// with a LoadInterceptorCompiler bound to the name register.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           String* name,
                                           Label* miss) {
  LoadInterceptorCompiler compiler(name_reg);
  CompileLoadInterceptor(&compiler,
                         this,
                         masm(),
                         object,
                         holder,
                         name,
                         lookup,
                         receiver,
                         scratch1,
                         scratch2,
                         miss);
}
750
751
// Compile the lazy-compile trampoline: call the LazyCompile runtime
// function for the function in r1, then tail-jump to the freshly
// compiled code (whose Code object is returned in r0 by the runtime).
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- r1: function
  //  -- lr: return address
  // -----------------------------------

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Preserve the function.
  __ push(r1);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);

  // Calculate the entry point: skip the Code header and remove the
  // heap-object tag.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore saved function.
  __ pop(r1);

  // Tear down temporary frame.
  __ LeaveInternalFrame();

  // Do a tail-call of the compiled function.
  __ Jump(r2);

  return GetCodeWithFlags(flags, "LazyCompileStub");
}
782
783
// Compile a call IC stub for a field-stored function: load the function
// out of the receiver's field at |index| and invoke it.  On any check
// failure, jumps to the generic call-miss builtin.
Object* CallStubCompiler::CompileCallField(JSObject* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, name, &miss);
  // Load the function to call into r1 (expected by GenerateCallFunction).
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(FIELD, name);
}
816
817
// Compiles a call IC stub specialized for calling the known constant
// |function| found on |holder|.  Depending on |check|, the receiver is
// verified to be a compatible object, string, number, boolean, or a JSArray
// with fast elements before the function is invoked directly.
Object* CallStubCompiler::CompileCallConstant(Object* object,
                                              JSObject* holder,
                                              JSFunction* function,
                                              String* name,
                                              CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.  For NUMBER_CHECK a smi receiver
  // is valid (it's a number), so the check is handled in that case below.
  if (check != NUMBER_CHECK) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  switch (check) {
    case RECEIVER_MAP_CHECK:
      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r3, r0, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::NUMBER_FUNCTION_INDEX,
                                            r0);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean: it must be exactly the true or
        // the false root value.
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::BOOLEAN_FUNCTION_INDEX,
                                            r0);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, name, &miss);
      }
      break;
    }

    case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
      CheckPrototypes(JSObject::cast(object), r1, holder, r3, r0, name, &miss);
      // Make sure object->HasFastElements().
      // Get the elements array of the object.
      __ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset));
      // Check that the object is in fast mode (not dictionary): fast
      // elements are backed by a plain FixedArray.
      __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
      __ cmp(r0, ip);
      __ b(ne, &miss);
      break;

    default:
      UNREACHABLE();
  }

  // All checks passed; invoke the constant function directly (tail call).
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  Use the function's name when available so
  // the stub can be found in the cache under it.
  String* function_name = NULL;
  if (function->shared()->name()->IsString()) {
    function_name = String::cast(function->shared()->name());
  }
  return GetCode(CONSTANT_FUNCTION, function_name);
}
949
950
// Compiles a call IC stub for a property that is provided by a named
// interceptor on |holder|.  The interceptor is invoked to produce the
// function to call; when the property behind the interceptor is a known
// compiled constant function, a fast path invokes it directly if the
// interceptor returns the no-result sentinel.
Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  ASSERT(holder->HasNamedInterceptor());
  ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
  Label miss;

  const Register receiver = r0;
  const Register holder_reg = r1;
  const Register name_reg = r2;
  const Register scratch = r3;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Look the property up behind the interceptor to detect the constant
  // function fast case below.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ BranchOnSmi(receiver, &miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, holder_reg,
                                 scratch, name, &miss);
  if (!reg.is(holder_reg)) {
    __ mov(holder_reg, reg);
  }

  // If we call a constant function when the interceptor returns
  // the no-result sentinel, generate code that optimizes this case.
  if (lookup.IsProperty() &&
      lookup.IsCacheable() &&
      lookup.type() == CONSTANT_FUNCTION &&
      lookup.GetConstantFunction()->is_compiled() &&
      !holder->IsJSArray()) {
    // Constant functions cannot sit on global object.
    ASSERT(!lookup.holder()->IsGlobalObject());

    // Call the interceptor.  Preserve holder and name across the call.
    __ EnterInternalFrame();
    __ push(holder_reg);
    __ push(name_reg);
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           holder);
    __ pop(name_reg);
    __ pop(holder_reg);
    __ LeaveInternalFrame();
    // r0 no longer contains the receiver.

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    Label invoke;
    __ b(ne, &invoke);
    // Check the prototypes between the interceptor's holder and the
    // constant function's holder.
    CheckPrototypes(holder, holder_reg,
                    lookup.holder(), r0,
                    scratch,
                    name,
                    &miss);

    // Tail-call the constant function directly.
    __ InvokeFunction(lookup.GetConstantFunction(),
                      arguments(),
                      JUMP_FUNCTION);

    // The interceptor produced a real result; fall through to the generic
    // call path below.
    __ bind(&invoke);

  } else {
    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    __ push(name_reg);

    PushInterceptorArguments(masm(), receiver, holder_reg, name_reg, holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
        5);

    __ pop(name_reg);
    __ LeaveInternalFrame();
  }

  // Move returned value, the function to call, to r1.
  __ mov(r1, r0);
  // Restore receiver.
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss);

  // Handle call cache miss.
  __ bind(&miss);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1059
1060
// Compiles a call IC stub for calling a function stored in a global
// property cell.  The cell is checked to still contain |function| (or a
// closure sharing its SharedFunctionInfo when the function lives in new
// space) before tail-calling the cached code.
Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            JSFunction* function,
                                            String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r1, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (Heap::InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, &miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ mov(r3, Operand(Handle<SharedFunctionInfo>(function->shared())));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
    __ b(ne, &miss);
  } else {
    // Old-space function: its address is stable, so compare directly.
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, &miss);
  }

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Setup the context (function already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  __ IncrementCounter(&Counters::call_global_inline, 1, r1, r3);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  __ InvokeCode(code, expected, arguments(),
                RelocInfo::CODE_TARGET, JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
  Handle<Code> ic = ComputeCallMiss(arguments().immediate());
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1142
1143
// Compiles a store IC stub that writes |r0| into the field at |index| of
// |object|, optionally performing a map transition when |transition| is
// non-NULL (i.e. the store adds a new property).
Object* StoreStubCompiler::CompileStoreField(JSObject* object,
                                             int index,
                                             Map* transition,
                                             String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  A transition store is cached as a
  // MAP_TRANSITION stub, a plain store as a FIELD stub.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
1169
1170
// Compiles a store IC stub for a property implemented by an AccessorInfo
// setter callback.  After map (and optional global-proxy security) checks,
// the receiver, callback info, name and value are pushed and the store is
// delegated to the kStoreCallbackProperty runtime function via a tail call.
Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                AccessorInfo* callback,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Push the four runtime arguments: receiver, callback info, name, value.
  __ push(r1);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ stm(db_w, sp, ip.bit() | r2.bit() | r0.bit());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
  __ TailCallRuntime(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
1217
1218
// Compiles a store IC stub for a property handled by a named interceptor on
// |receiver|.  After map (and optional global-proxy security) checks, the
// receiver, name and value are pushed and the store is delegated to the
// kStoreInterceptorProperty runtime function via a tail call.
Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ push(r1);  // receiver.
  __ push(r2);  // name.
  __ push(r0);  // value.

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
  __ TailCallRuntime(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1264
1265
// Compiles a store IC stub that writes the value directly into a global
// property cell, guarded only by a check that the global object's map is
// unchanged.
Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                              JSGlobalPropertyCell* cell,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Store the value in the cell.
  __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));

  __ IncrementCounter(&Counters::named_store_global_inline, 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(&Counters::named_store_global_inline_miss, 1, r4, r3);
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1298
1299
// Compiles a load IC stub that reads the in-object/backing-store field at
// |index| from |object| (walking the prototype chain up to |holder|).
Object* LoadStubCompiler::CompileLoadField(JSObject* object,
                                           JSObject* holder,
                                           int index,
                                           String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the top of the stack.
  __ ldr(r0, MemOperand(sp, 0));

  GenerateLoadField(object, holder, r0, r3, r1, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
1320
1321
// Compiles a load IC stub for a property implemented by an AccessorInfo
// getter |callback|.  Generation can fail (e.g. when the callback cannot be
// called from generated code), in which case the failure is propagated.
Object* LoadStubCompiler::CompileLoadCallback(String* name,
                                              JSObject* object,
                                              JSObject* holder,
                                              AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the top of the stack.
  __ ldr(r0, MemOperand(sp, 0));
  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(object, holder, r0, r2, r3, r1,
                                      callback, name, &miss, &failure);
  // Bail out with the failure produced by the generator.
  if (!success) return failure;

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
1345
1346
// Compiles a load IC stub that returns the known constant |value| of a
// property on |object| (found on the chain up to |holder|) after map checks.
Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                              JSObject* holder,
                                              Object* value,
                                              String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp] : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the top of the stack.
  __ ldr(r0, MemOperand(sp, 0));

  GenerateLoadConstant(object, holder, r0, r3, r1, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
1367
1368
// Compiles a load IC stub for a property handled by a named interceptor on
// |holder|; the post-interceptor lookup result lets the generator optimize
// for the property actually behind the interceptor.
Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
                                                 JSObject* holder,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the top of the stack.
  __ ldr(r0, MemOperand(sp, 0));

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(object,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
1398
1399
// Compiles a load IC stub that reads a global property directly from its
// property cell.  When the property is deletable (!is_dont_delete) the cell
// may contain the hole, which must be treated as a miss.
Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                            GlobalObject* holder,
                                            JSGlobalPropertyCell* cell,
                                            String* name,
                                            bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, 0 * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ tst(r1, Operand(kSmiTagMask));
    __ b(eq, &miss);
  }

  // Check that the map of the global has not changed.
  CheckPrototypes(object, r1, holder, r3, r0, name, &miss);

  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r0, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(r0, ip);
    __ b(eq, &miss);
  }

  __ IncrementCounter(&Counters::named_load_global_inline, 1, r1, r3);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(&Counters::named_load_global_inline_miss, 1, r1, r3);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
1447
1448
// Compiles a keyed load IC stub specialized for the string key |name|: the
// key on the stack must equal |name|, after which the load behaves like a
// named field load.
Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                JSObject* receiver,
                                                JSObject* holder,
                                                int index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  // Check the key is the cached one.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadField(receiver, holder, r0, r3, r1, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(FIELD, name);
}
1472
1473
// Compiles a keyed load IC stub specialized for the string key |name| whose
// property is implemented by an AccessorInfo getter |callback|.  Generation
// can fail, in which case the failure is propagated to the caller.
Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  // Check the key is the cached one.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  Failure* failure = Failure::InternalError();
  bool success = GenerateLoadCallback(receiver, holder, r0, r2, r3, r1,
                                      callback, name, &miss, &failure);
  // Bail out with the failure produced by the generator.
  if (!success) return failure;

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1501
1502
// Compiles a keyed load IC stub specialized for the string key |name| that
// returns the known constant |value| after map checks.
Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                   JSObject* receiver,
                                                   JSObject* holder,
                                                   Object* value) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadConstant(receiver, holder, r0, r3, r1, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
1528
1529
// Compiles a keyed load IC stub specialized for the string key |name| whose
// property is handled by a named interceptor on |holder|.
Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Look the property up behind the interceptor so the generator can
  // specialize for it.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          r0,
                          r2,
                          r3,
                          r1,
                          name,
                          &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(INTERCEPTOR, name);
}
1563
1564
// Compiles a keyed load IC stub specialized for loading the "length"
// property of a JSArray when the key equals |name|.
Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;

  // Check the key is the cached one
  __ ldr(r2, MemOperand(sp, 0));
  __ ldr(r0, MemOperand(sp, kPointerSize));

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadArrayLength(masm(), r0, r3, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1586
1587
// Compiles a keyed load IC stub specialized for loading the "length"
// property of a string when the key equals |name|.
Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  Label miss;
  __ IncrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);

  // Check the key is the cached one.
  __ ldr(r2, MemOperand(sp));
  __ ldr(r0, MemOperand(sp, kPointerSize));  // receiver

  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  GenerateLoadStringLength(masm(), r0, r1, r3, &miss);
  __ bind(&miss);
  // Undo the optimistic counter increment on the miss path.
  __ DecrementCounter(&Counters::keyed_load_string_length, 1, r1, r3);

  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1611
1612
// TODO(1224671): implement the fast case.
// Placeholder: always jumps to the generic keyed-load miss handler instead
// of loading a function's "prototype" property inline.
Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : key
  //  -- sp[4] : receiver
  // -----------------------------------
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  return GetCode(CALLBACKS, name);
}
1624
1625
// Compiles a keyed store IC stub specialized for the string key |name|:
// stores r0 into the field at |index| of |object|, optionally performing a
// map transition when |transition| is non-NULL.
Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  __ IncrementCounter(&Counters::keyed_store_field, 1, r1, r3);

  // Check that the name has not changed.
  __ cmp(r2, Operand(Handle<String>(name)));
  __ b(ne, &miss);

  // Load receiver from the stack.
  __ ldr(r3, MemOperand(sp));
  // r1 is used as scratch register, r3 and r2 might be clobbered.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r3, r2, r1,
                     &miss);
  __ bind(&miss);

  // Undo the optimistic counter increment and restore the clobbered name
  // register before jumping to the generic miss handler.
  __ DecrementCounter(&Counters::keyed_store_field, 1, r1, r3);
  __ mov(r2, Operand(Handle<String>(name)));  // restore name register.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.  A transition store is cached as a
  // MAP_TRANSITION stub, a plain store as a FIELD stub.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
1663
1664
// Compiles a specialized construct stub for functions whose constructor
// body consists only of simple "this.x = ..." assignments (as recorded on
// the SharedFunctionInfo). The stub allocates the JSObject in new space and
// initializes its in-object properties inline, jumping to the generic
// construct stub whenever any precondition for the fast path fails.
Object* ConstructStubCompiler::CompileConstructStub(
    SharedFunctionInfo* shared) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- r1    : constructor
  //  -- lr    : return address
  //  -- [sp]  : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r7 for holding undefined which is used in several places below.
  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  // A non-undefined debug-info slot means the function has debug info
  // attached, so the fast path must not bypass the real function code.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(r2, r7);
  __ b(ne, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  // The slot holds either the prototype or the initial map; a smi or a
  // non-map heap object means no initial map has been set up yet.
  // r1: constructor function
  // r7: undefined
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &generic_stub_call);
  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
  __ b(ne, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
  __ Check(ne, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // The instance size from the map is in words; allocation falls back to
  // the generic stub if new space is exhausted.
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r7: undefined
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ AllocateInNewSpace(r3,
                        r4,
                        r5,
                        r6,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields. Map is set to initial
  // map and properties and elements are set to empty fixed array.
  // The post-indexed stores below advance r5 one word per field, relying on
  // map/properties/elements occupying the first three words of the object
  // (checked by the ASSERT_EQs).
  // r0: argc
  // r1: constructor function
  // r2: initial map
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r7: undefined
  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ mov(r5, r4);
  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

  // Calculate the location of the first argument. The stack contains only the
  // argc arguments. r1 points one word past the last (first-pushed) argument,
  // so argument i lives at r1 - (i + 1) * kPointerSize.
  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));

  // Fill all the in-object properties with undefined.
  // r0: argc
  // r1: first argument
  // r3: object size (in words)
  // r4: JSObject (not tagged)
  // r5: First in-object property of JSObject (not tagged)
  // r7: undefined
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      Label not_passed, next;
      // Check if the argument assigned to the property is actually passed.
      // arg_number is zero-based, so the argument is present only when
      // argc > arg_number; otherwise store undefined instead.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      __ cmp(r0, Operand(arg_number));
      __ b(le, &not_passed);
      // Argument passed - find it on the stack.
      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      __ b(&next);
      __ bind(&not_passed);
      // Set the property to undefined.
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
      __ bind(&next);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ mov(r2, Operand(constant));
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
    }
  }

  // Fill the unused in-object property fields with undefined.
  for (int i = shared->this_property_assignments_count();
       i < shared->CalculateInObjectProperties();
       i++) {
      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
  }

  // r0: argc
  // r4: JSObject (not tagged)
  // Move argc to r1 and the JSObject to return to r0 and tag it.
  // Tagging via orr is valid because new-space allocations are
  // pointer-aligned, leaving the low bits clear.
  __ mov(r1, r0);
  __ mov(r0, r4);
  __ orr(r0, r0, Operand(kHeapObjectTag));

  // r0: JSObject
  // r1: argc
  // Remove caller arguments and receiver from the stack and return:
  // argc words of arguments plus one word for the receiver.
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(&Counters::constructed_objects, 1, r1, r2);
  __ IncrementCounter(&Counters::constructed_objects_stub, 1, r1, r2);
  __ Jump(lr);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
1806
1807
1808#undef __
1809
1810} }  // namespace v8::internal
1811