stub-cache-arm.cc revision 85b71799222b55eb5dd74ea26efe0c64ab655c8c
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ mov(offsets_base_addr, Operand(key_offset));
  __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
  __ cmp(name, ip);
  __ b(ne, &miss);

  // Get the code entry from the cache.
  __ add(offsets_base_addr, offsets_base_addr,
         Operand(value_off_addr - key_off_addr));
  __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Check that the flags match what we're looking for.
  __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
  __ cmp(scratch2, Operand(flags));
  __ b(ne, &miss);

  // Re-load code entry from cache.
  __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));

  // Jump to the first instruction in the code stub.
  __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}
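
// For reference, the probe emitted above behaves roughly like the following
// C++ (an illustrative sketch only: `keys`, `values` and `entry` are
// stand-ins for the per-table key/value arrays reached through
// key_offset/value_offset and the scaled index in the offset register, not
// real declarations):
//
//   if (keys[entry] != name) goto miss;
//   Code* code = values[entry];
//   if ((code->flags() & ~Code::kFlagsNotUsedInLookup) != flags) goto miss;
//   Jump(code + Code::kHeaderSize - kHeapObjectTag);  // first instruction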


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
    MacroAssembler* masm,
    Label* miss_label,
    Register receiver,
    String* name,
    Register scratch0,
    Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
      masm,
      miss_label,
      &done,
      receiver,
      properties,
      name,
      scratch1);
  if (result->IsFailure()) return result;

  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  return result;
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ add(scratch, scratch, Operand(ip));
  __ eor(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: Compute hash for secondary probe.
  __ sub(scratch, scratch, Operand(name));
  __ add(scratch, scratch, Operand(flags));
  __ and_(scratch,
          scratch,
          Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
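
// For reference, the two probes above hash with (an illustrative sketch;
// `hash`, `map` and `name` denote the hash field and raw pointer values
// used by the code above, and the masks are the ones it applies):
//
//   primary   = ((hash + map) ^ flags) & primary_mask;
//   secondary = ((primary - name) + flags) & secondary_mask;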


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ ldr(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ Move(ip, isolate->global());
  __ cmp(prototype, ip);
  __ b(ne, miss);
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly; otherwise the property is loaded from the holder's
// properties fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ ldr(dst, FieldMemOperand(dst, offset));
  }
}
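
// Worked example (illustrative numbers, not from any real map): for a
// holder whose map has inobject_properties() == 4 and instance_size() == 32,
// field index 1 is in-object and loads from offset
// 32 + (1 - 4) * kPointerSize == 20 inside the object, while field index 5
// loads element (5 - 4) == 1 of the properties FixedArray.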


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, miss_label);

  // Load length directly from the JS array.
  __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ and_(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ cmp(scratch2, Operand(static_cast<int32_t>(kStringTag)));
  __ b(ne, non_string_object);
}


// Generate code to load the length from a string object and return it.
// If the receiver object is not a string or a wrapped string object,
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ ldr(r0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmp(scratch1, Operand(JS_VALUE_TYPE));
    __ b(ne, miss);

    // Unwrap the value and check if the wrapped value is a string.
    __ ldr(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ ldr(r0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate StoreField code; the value is passed in the r0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // r0 : value
  Label exit;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label);

  // Check that the map of the receiver hasn't changed.
  __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(Handle<Map>(object->map())));
  __ b(ne, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ mov(r2, Operand(Handle<Map>(transition)));
    __ Push(r2, r0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ mov(ip, Operand(Handle<Map>(transition)));
    __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ str(r0, FieldMemOperand(receiver_reg, offset));

    // Skip updating write barrier if storing a smi.
    __ JumpIfSmi(r0, &exit);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ str(r0, FieldMemOperand(scratch, offset));

    // Skip updating write barrier if storing a smi.
    __ JumpIfSmi(r0, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register r0).
  __ bind(&exit);
  __ Ret();
}


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r0: receiver
  //  -- r1: function to call
  // -----------------------------------

  // Check that the function really is a function.
  __ JumpIfSmi(r1, miss);
  __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(r1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  Register scratch = name;
  __ mov(scratch, Operand(Handle<Object>(interceptor)));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
  __ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}


static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ mov(r0, Operand(5));
  __ mov(r1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = 3;

// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  __ mov(scratch, Operand(Smi::FromInt(0)));
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and set up the context.
  JSFunction* function = optimization.constant_function();
  __ mov(r5, Operand(Handle<JSFunction>(function)));
  __ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    __ Move(r0, api_call_info_handle);
    __ ldr(r6, FieldMemOperand(r0, CallHandlerInfo::kDataOffset));
  } else {
    __ Move(r6, Handle<Object>(call_data));
  }
  // Store js function and call data.
  __ stm(ib, sp, r5.bit() | r6.bit());

  // r2 points to call data as expected by Arguments
  // (refer to layout above).
  __ add(r2, sp, Operand(2 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiStackSpace = 4;
  __ EnterExitFrame(false, kApiStackSpace);

  // r0 = v8::Arguments&
  // Arguments is after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // v8::Arguments::implicit_args = data
  __ str(r2, MemOperand(r0, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ add(ip, r2, Operand(argc * kPointerSize));
  __ str(ip, MemOperand(r0, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ mov(ip, Operand(argc));
  __ str(ip, MemOperand(r0, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ mov(ip, Operand(0));
  __ str(ip, MemOperand(r0, 3 * kPointerSize));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
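
// For reference, the writes above lay out a v8::Arguments-shaped block just
// above the exit frame, roughly equivalent to this illustrative view (field
// names follow the comments above; this is a sketch, not the declaration
// from the public API headers):
//
//   struct ArgumentsView {
//     Object** implicit_args;   // r2: the call data slot
//     Object** values;          // address of the last JS argument
//     int length_;              // argc
//     int is_construct_call;    // 0
//   };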

class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();
    }
  }

 private:
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    Counters* counters = masm->isolate()->counters();

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION, call_kind);
    }
    // Deferred code for the fast API call case: clean up preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ b(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return masm->isolate()->heap()->undefined_value();
  }

  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If interceptor returns no-result sentinel, call the constant function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch);
    __ b(ne, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  __ mov(scratch, Operand(Handle<Object>(cell)));
  __ ldr(scratch,
         FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
  return cell;
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}


// Convert and store the int passed in register ival to an IEEE 754 single
// precision floating point value at memory location (dst + 4 * wordoffset).
// If VFP3 is available, use it for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, ival);
    __ add(scratch1, dst, Operand(wordoffset, LSL, 2));
    __ vcvt_f32_s32(s0, s0);
    __ vstr(s0, scratch1, 0);
  } else {
    Label not_special, done;
    // Move the sign bit from the source to the destination.  This works
    // because the sign bit of the binary32 representation has the same
    // position and polarity as the 2's complement sign bit of the integer.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ and_(fval, ival, Operand(kBinary32SignMask), SetCC);
    // Negate value if it is negative.
    __ rsb(ival, ival, Operand(0, RelocInfo::NONE), LeaveCC, ne);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ cmp(ival, Operand(1));
    __ b(gt, &not_special);

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ orr(fval, fval, Operand(exponent_word_for_1), LeaveCC, eq);
    __ b(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ CountLeadingZeros(zeros, ival, scratch1);

    // Compute exponent and or it into the exponent register.
    __ rsb(scratch1,
           zeros,
           Operand((kBitsPerInt - 1) + kBinary32ExponentBias));

    __ orr(fval,
           fval,
           Operand(scratch1, LSL, kBinary32ExponentShift));

    // Shift up the source chopping the top bit off.
    __ add(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ mov(ival, Operand(ival, LSL, zeros));
    // Or in the mantissa (the top 23 bits of ival).
    __ orr(fval,
           fval,
           Operand(ival, LSR, kBitsPerInt - kBinary32MantissaBits));

    __ bind(&done);
    __ str(fval, MemOperand(dst, wordoffset, LSL, 2));
  }
}
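
// For reference, the non-VFP path above computes, in effect (an illustrative
// C sketch that ignores the special cases for -1, 0 and 1):
//
//   uint32_t bits = ival & kBinary32SignMask;       // sign bit
//   uint32_t mag  = bits ? -ival : ival;            // magnitude
//   int zeros     = CountLeadingZeros(mag);
//   bits |= ((31 - zeros) + kBinary32ExponentBias) << kBinary32ExponentShift;
//   mag <<= zeros + 1;                              // drop the implicit bit
//   bits |= mag >> (kBitsPerInt - kBinary32MantissaBits);  // 23 mantissa bits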


// Convert unsigned integer with specified number of leading zeroes in binary
// representation to IEEE 754 double.
// Integer to convert is passed in register hiword.
// Resulting double is returned in registers hiword:loword.
// This function does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ mov(scratch, Operand(biased_exponent << HeapNumber::kExponentShift));
  if (mantissa_shift_for_hi_word > 0) {
    __ mov(loword, Operand(hiword, LSL, mantissa_shift_for_lo_word));
    __ orr(hiword, scratch, Operand(hiword, LSR, mantissa_shift_for_hi_word));
  } else {
    __ mov(loword, Operand(0, RelocInfo::NONE));
    __ orr(hiword, scratch, Operand(hiword, LSL, mantissa_shift_for_hi_word));
  }

  // If the least significant bit of the biased exponent was not 1, it has
  // been corrupted by the most significant bit of the mantissa, so fix it.
  if (!(biased_exponent & 1)) {
    __ bic(hiword, hiword, Operand(1 << HeapNumber::kExponentShift));
  }
}
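
// For reference (an illustrative C sketch of the hi-word case above, where
// meaningful_bits exceeds HeapNumber::kMantissaBitsInTopWord): the integer's
// leading 1 bit lands on the exponent's least significant bit when or'ed in,
// which is why the fixup at the end of the function is needed.
//
//   uint64_t bits = (uint64_t)biased_exponent << 52;
//   bits |= (uint64_t)hiword << (52 - meaningful_bits);  // implicit bit
//                                                        // overlaps exp LSB
//   if (!(biased_exponent & 1)) bits &= ~(1ull << 52);   // undo corruption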


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ str(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
                                                                      miss,
                                                                      reg,
                                                                      name,
                                                                      scratch1,
                                                                      scratch2);
      if (negative_lookup->IsFailure()) {
        set_failure(Failure::cast(negative_lookup));
        return reg;
      }

      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object is in holder_reg.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));

      // Branch on the result of the map check.
      __ b(ne, miss);

      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore scratch register to be the map of the object.  In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object is in holder_reg.
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      __ cmp(scratch1, Operand(Handle<Map>(current->map())));
      // Branch on the result of the map check.
      __ b(ne, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object is in holder_reg.
      __ mov(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      __ str(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch1, Operand(Handle<Map>(current->map())));
  __ b(ne, miss);

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}
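
// In outline, the code emitted by CheckPrototypes above performs (a sketch,
// not generated code):
//
//   for each object on the chain from `object` up to (excluding) `holder`:
//     prove the object still has the expected shape (a map check, or a
//     negative dictionary lookup for slow-mode non-global objects);
//     check global-proxy access rights where required;
//     advance reg to the prototype (loaded from the map if the prototype is
//     in new space, embedded as a constant otherwise);
//   then check the holder's map and any global property cells skipped over.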


void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);
  GenerateFastPropertyLoad(masm(), r0, reg, holder, index);
  __ Ret();
}


void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name,
                  miss);

  // Return the constant value.
  __ mov(r0, Operand(Handle<Object>(value)));
  __ Ret();
}


MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them, and store
  // pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  Handle<AccessorInfo> callback_handle(callback);
  if (heap()->InNewSpace(callback_handle->data())) {
    __ Move(scratch3, callback_handle);
    __ ldr(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ Move(scratch3, Handle<Object>(callback_handle->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(r0, sp);  // r0 = Handle<String>

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  const int kApiStackSpace = 1;
  __ EnterExitFrame(false, kApiStackSpace);
  // Create AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object **args_) as the data.
  __ str(scratch2, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = AccessorInfo&

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = 4;
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}


void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only those; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ Push(receiver, holder_reg, name_reg);
    } else {
      __ Push(holder_reg, name_reg);
    }

    // Invoke an interceptor.  Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see the caller of
    // this method).
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if the interceptor provided a value for the property.  If so,
    // return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1);
    __ b(eq, &interceptor_failed);
    __ LeaveInternalFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), r0, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ Ret();
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ Move(scratch2, Handle<AccessorInfo>(callback));
      // holder_reg is either receiver or scratch1.
      if (!receiver.is(holder_reg)) {
        ASSERT(scratch1.is(holder_reg));
        __ Push(receiver, holder_reg);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(scratch3, scratch2, name_reg);
      } else {
        __ push(receiver);
        __ ldr(scratch3,
               FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
        __ Push(holder_reg, scratch3, scratch2, name_reg);
      }

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            masm()->isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          masm()->isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}

1394
1395void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
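  // Keyed call ICs receive the property name dynamically in r2, so it must
  // be checked against the name this stub was compiled for.  Monomorphic
  // CALL_IC stubs are already specialized per name and need no check.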
  if (kind_ == Code::KEYED_CALL_IC) {
    __ cmp(r2, Operand(Handle<String>(name)));
    __ b(ne, miss);
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
                                                   JSObject* holder,
                                                   String* name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  // If the object is the holder then we know that it's a global
  // object, which can only happen for contextual calls. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(r0, miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}


void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ ldr(r1, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(function)) {
    // We can't embed a pointer to a function in new space, so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(r1, miss);
    __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
    __ b(ne, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(r3, Handle<SharedFunctionInfo>(function->shared()));
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ cmp(r4, r3);
    __ b(ne, miss);
  } else {
    __ cmp(r1, Operand(Handle<JSFunction>(function)));
    __ b(ne, miss);
  }
}


MaybeObject* CallStubCompiler::GenerateMissBranch() {
  MaybeObject* maybe_obj =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_ic_state_);
  Object* obj;
  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
  return obj;
}


MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  const int argc = arguments().immediate();

  // Get the receiver of the function from the stack into r0.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r0, &miss);

  // Check the prototype chain and compute the holder register.
  Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss);
  GenerateFastPropertyLoad(masm(), r1, reg, holder, index);

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(FIELD, name);
}


MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If the object is not an array, bail out to the regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  Register receiver = r1;

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object), receiver,
                  holder, r3, r0, r4, name, &miss);

  if (argc == 0) {
    // Nothing to do, just return the length.
    __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ Drop(argc + 1);
    __ Ret();
  } else {
    Label call_builtin;

    Register elements = r3;
    Register end_elements = r5;

    // Get the elements array of the object.
    __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ CheckMap(elements,
                r0,
                Heap::kFixedArrayMapRootIndex,
                &call_builtin,
                DONT_DO_SMI_CHECK);

    if (argc == 1) {  // Otherwise fall through to call the builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into r0 and calculate the new length.
      __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      STATIC_ASSERT(kSmiTagSize == 1);
      STATIC_ASSERT(kSmiTag == 0);
      __ add(r0, r0, Operand(Smi::FromInt(argc)));
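      // Smis are integers tagged with a 0 bit (value << 1), so adding the
      // smi-encoded argc updates the length directly on the tagged value.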

      // Get the elements' length.
      __ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmp(r0, r4);
      __ b(gt, &attempt_to_grow_elements);

      // Save the new length.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));

      // Push the element.
      __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize));
      // We may need a register containing the address end_elements below,
      // so write back the value in end_elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
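      // r0 holds the new length as a smi (the value shifted left by one);
      // shifting it left by kPointerSizeLog2 - kSmiTagSize (= 1) scales the
      // tagged length to a byte offset into the elements array.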
      const int kEndElementsOffset =
          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
      __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex));
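      // The PreIndex addressing mode writes the effective address back into
      // end_elements, so the write barrier below can use it directly.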

      // Check for a smi.
      __ JumpIfNotSmi(r4, &with_write_barrier);
      __ bind(&exit);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&with_write_barrier);
      __ InNewSpace(elements, r4, eq, &exit);
      __ RecordWriteHelper(elements, end_elements, r4);
      __ Drop(argc + 1);
      __ Ret();

      __ bind(&attempt_to_grow_elements);
      // r0: array's length + 1.
      // r4: elements' length.

      if (!FLAG_inline_new) {
        __ b(&call_builtin);
      }

      Isolate* isolate = masm()->isolate();
      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate);
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate);

      const int kAllocationDelta = 4;
      // Load top and check if it is the end of elements.
      __ add(end_elements, elements,
             Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
      __ add(end_elements, end_elements, Operand(kEndElementsOffset));
      __ mov(r7, Operand(new_space_allocation_top));
      __ ldr(r6, MemOperand(r7));
      __ cmp(end_elements, r6);
      __ b(ne, &call_builtin);
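      // Growing in place is only possible when the elements array ends
      // exactly at the new-space allocation top, i.e. it is the most
      // recently allocated object; otherwise defer to the builtin.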

      __ mov(r9, Operand(new_space_allocation_limit));
      __ ldr(r9, MemOperand(r9));
      __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
      __ cmp(r6, r9);
      __ b(hi, &call_builtin);

      // We fit and can grow the elements.
      // Update new_space_allocation_top.
      __ str(r6, MemOperand(r7));
      // Push the argument.
      __ ldr(r6, MemOperand(sp, (argc - 1) * kPointerSize));
      __ str(r6, MemOperand(end_elements));
      // Fill the rest with holes.
      __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ str(r6, MemOperand(end_elements, i * kPointerSize));
      }

      // Update the elements' and array's sizes.
      __ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ add(r4, r4, Operand(Smi::FromInt(kAllocationDelta)));
      __ str(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));

      // Elements are in new space, so the write barrier is not required.
      __ Drop(argc + 1);
      __ Ret();
    }
    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
                                                   masm()->isolate()),
                                 argc + 1,
                                 1);
  }

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If the object is not an array, bail out to the regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss, return_undefined, call_builtin;

  Register receiver = r1;
  Register elements = r3;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the maps haven't changed.
  CheckPrototypes(JSObject::cast(object),
                  receiver, holder, elements, r4, r0, name, &miss);

  // Get the elements array of the object.
  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CheckMap(elements,
              r0,
              Heap::kFixedArrayMapRootIndex,
              &call_builtin,
              DONT_DO_SMI_CHECK);

  // Get the array's length into r4 and calculate the new length.
  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
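  // The sub sets the condition flags, so lt below detects an empty array
  // (smi length 0 underflows to -1) and returns undefined.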
  __ b(lt, &return_undefined);

  // Get the last element.
  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  // We can't address the last element in one operation. Compute the more
  // expensive shift first, and use an offset later on.
  __ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ cmp(r0, r6);
  __ b(eq, &call_builtin);

  // Set the array's length.
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));

  // Fill with the hole.
  __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&return_undefined);
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&call_builtin);
  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
                                                 masm()->isolate()),
                               argc + 1,
                               1);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If the object is not a string, bail out to the regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_ic_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r1;
  Register index = r4;
  Register scratch = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kNanValueRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore the function name in r2.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // If the object is not a string, bail out to the regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_ic_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r0,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder,
                  r1, r3, r4, name, &miss);

  Register receiver = r0;
  Register index = r4;
  Register scratch1 = r1;
  Register scratch2 = r3;
  Register result = r0;
  __ ldr(receiver, MemOperand(sp, argc * kPointerSize));
  if (argc > 0) {
    __ ldr(index, MemOperand(sp, (argc - 1) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
    __ Drop(argc + 1);
    __ Ret();
  }

  __ bind(&miss);
  // Restore the function name in r2.
  __ Move(r2, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = r1;
  __ ldr(code, MemOperand(sp, 0 * kPointerSize));

  // Check that the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ and_(code, code, Operand(Smi::FromInt(0xffff)));
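  // Masking with the smi-encoded 0xffff truncates the char code to 16 bits,
  // as String.fromCharCode requires, while leaving the smi tag intact.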

  StringCharFromCodeGenerator char_from_code_generator(code, r0);
  char_from_code_generator.GenerateFast(masm());
  __ Drop(argc + 1);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(VFP3)) {
    return heap()->undefined_value();
  }

  CpuFeatures::Scope scope_vfp3(VFP3);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, just return.
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(r0, Operand(kSmiTagMask));
  __ Drop(argc + 1, eq);
  __ Ret(eq);
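  // Drop and Ret above are predicated on eq (smi tag clear): a smi is
  // already an integer, so Math.floor is the identity on it.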

  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);

  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;

  // VFP3 is available here, so use the FPU with the RM (round towards
  // minus infinity) rounding mode.

  // Load the HeapNumber value.
  // We will need access to the value in the core registers, so we load it
  // with ldrd and move it to the fpu. It also spares a sub instruction for
  // updating the HeapNumber value address, as vldr expects a multiple
  // of 4 offset.
  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
  __ vmov(d1, r4, r5);

  // Back up the FPSCR.
  __ vmrs(r3);
  // Set a custom FPSCR:
  //  - Set the rounding mode to "Round towards Minus Infinity"
  //    (i.e. bits [23:22] = 0b10).
  //  - Clear the vfp cumulative exception flags (bits [3:0]).
  //  - Make sure the Flush-to-zero mode control bit is unset (bit 24).
  __ bic(r9, r3,
      Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
  __ orr(r9, r9, Operand(kRoundToMinusInf));
  __ vmsr(r9);

  // Convert the argument to an integer.
  __ vcvt_s32_f64(s0, d1, kFPSCRRounding);
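  // With kFPSCRRounding the conversion honors the rounding mode installed
  // above (round towards minus infinity), so vcvt itself computes the floor.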

  // Use the vcvt latency to start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));

  // Retrieve the FPSCR and check for vfp exceptions.
  __ vmrs(r9);
  __ tst(r9, Operand(kVFPExceptionMask));
  __ b(&no_vfp_exception, eq);

  // Check for NaN, Infinity, and -Infinity.
  // They are invariant through a Math.floor call, so just
  // return the original argument.
  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord), SetCC);
  __ b(&restore_fpscr_and_return, eq);
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
  // If greater or equal, the argument is already rounded and in r0.
  __ b(&restore_fpscr_and_return, ge);
  __ b(&wont_fit_smi);

  __ bind(&no_vfp_exception);
  // Move the result back to general purpose register r0.
  __ vmov(r0, s0);
  // Check if the result fits into a smi.
  __ add(r1, r0, Operand(0x40000000), SetCC);
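  // Adding 0x40000000 overflows into the sign bit exactly when the result
  // lies outside the smi range [-2^30, 2^30 - 1], so mi detects values
  // that cannot be tagged as a smi.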
  __ b(&wont_fit_smi, mi);
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));

  // Check for -0.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(&restore_fpscr_and_return, ne);
  // r5 already holds the HeapNumber exponent.
  __ tst(r5, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else r0 is loaded with 0, so we can also just return.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);

  __ bind(&restore_fpscr_and_return);
  // Restore the FPSCR and return.
  __ vmsr(r3);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&wont_fit_smi);
  // Restore the FPSCR and fall through to the slow case.
  __ vmsr(r3);

  __ bind(&slow);
  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r2                     : function name
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ JumpIfSmi(r1, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(r0, &not_smi);

  // Do bitwise not or do nothing depending on the sign of the
  // argument.
  __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1));

  // Add 1 or do nothing depending on the sign of the argument.
  __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC);
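  // Together the eor and sub compute (x ^ mask) - mask with mask = x >> 31:
  // the identity for non-negative x and two's-complement negation for
  // negative x, i.e. a branchless absolute value.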

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ b(mi, &slow);

  // Smi case done.
  __ Drop(argc + 1);
  __ Ret();

  // Check if the argument is a heap number and load its exponent and
  // sign.
  __ bind(&not_smi);
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
  __ ldr(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ tst(r1, Operand(HeapNumber::kSignMask));
  __ b(ne, &negative_sign);
  __ Drop(argc + 1);
  __ Ret();

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ eor(r1, r1, Operand(HeapNumber::kSignMask));
  __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r0, r4, r5, r6, &slow);
  __ str(r1, FieldMemOperand(r0, HeapNumber::kExponentOffset));
  __ str(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
  __ Drop(argc + 1);
  __ Ret();

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}


MaybeObject* CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  Counters* counters = isolate()->counters();

  ASSERT(optimization.is_simple_api_call());
  // Bail out if the object is a global object, as we don't want to
  // repatch it to the global receiver.
  if (object->IsGlobalObject()) return heap()->undefined_value();
  if (cell != NULL) return heap()->undefined_value();
  if (!object->IsJSObject()) return heap()->undefined_value();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
  if (depth == kInvalidProtoDepth) return heap()->undefined_value();

  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &miss_before_stack_reserved);

  __ IncrementCounter(counters->call_const(), 1, r0, r3);
  __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);

  ReserveSpaceForFastApiCall(masm(), r0);

  // Check that the maps haven't changed and find the holder as a side effect.
  CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                  depth, &miss);

  MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
  if (result->IsFailure()) return result;

  __ bind(&miss);
  FreeSpaceForFastApiCall(masm());

  __ bind(&miss_before_stack_reserved);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // Undefined means bail out to the regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(r1, &miss);
  }

  // Make sure that it's okay not to patch the on-stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
                          1, r0, r3);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                      &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ ldr(r3, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
        __ str(r3, MemOperand(sp, argc * kPointerSize));
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a string or a symbol.
        __ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
        __ b(ge, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(r1, &fast);
        __ CompareObjectType(r1, r0, r0, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, r0, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r0, holder, r3,
                        r1, r4, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

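  // Contextual calls have no explicit receiver and are invoked as plain
  // functions; property calls are invoked as methods. The distinction is
  // decoded from the IC's extra state.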
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}


MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), r2, extra_ic_state_);
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         r1,
                                         r3,
                                         r4,
                                         r0,
                                         &miss);
  if (result->IsFailure()) {
    return result;
  }

  // Move the returned value, the function to call, into r1.
  __ mov(r1, r0);
  // Restore the receiver.
  __ ldr(r0, MemOperand(sp, argc * kPointerSize));

  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // Undefined means bail out to the regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
    __ str(r3, MemOperand(sp, argc * kPointerSize));
  }

  // Set up the context (the function is already in r1).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
  ASSERT(function->is_compiled());
  Handle<Code> code(function->code());
  ParameterCount expected(function->shared()->formal_parameter_count());
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
                  NullCallWrapper(), call_kind);
  } else {
    __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
                  JUMP_FUNCTION, call_kind);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}


MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     r1, r2, r3,
                     &miss);
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}


MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(r1, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Perform a global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // The stub is never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ push(r1);  // receiver
  __ mov(ip, Operand(Handle<AccessorInfo>(callback)));  // callback info
  __ Push(ip, r2, r0);

  // Do a tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}


MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(r1, &miss);

  // Check that the map of the object hasn't changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(receiver->map())));
  __ b(ne, &miss);

  // Perform a global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(r1, r3, &miss);
  }

  // The stub is never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  __ Push(r1, r2, r0);  // Receiver, name, value.

  __ mov(r0, Operand(Smi::FromInt(strict_mode_)));
  __ push(r0);  // strict mode

  // Do a tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
                        masm()->isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}


MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ cmp(r3, Operand(Handle<Map>(object->map())));
  __ b(ne, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted, and reintroducing the global would need
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ mov(r4, Operand(Handle<JSGlobalPropertyCell>(cell)));
  __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
  __ ldr(r6, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
  __ cmp(r5, r6);
  __ b(eq, &miss);

  // Store the value in the cell.
  __ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));

  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
  __ Ret();

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2729
2730
2731MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2732                                                      JSObject* object,
2733                                                      JSObject* last) {
2734  // ----------- S t a t e -------------
2735  //  -- r0    : receiver
2736  //  -- lr    : return address
2737  // -----------------------------------
2738  Label miss;
2739
2740  // Check that receiver is not a smi.
2741  __ JumpIfSmi(r0, &miss);
2742
2743  // Check the maps of the full prototype chain.
2744  CheckPrototypes(object, r0, last, r3, r1, r4, name, &miss);
2745
2746  // If the last object in the prototype chain is a global object,
2747  // check that the global property cell is empty.
2748  if (last->IsGlobalObject()) {
2749    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2750                                                  GlobalObject::cast(last),
2751                                                  name,
2752                                                  r1,
2753                                                  &miss);
2754    if (cell->IsFailure()) {
2755      miss.Unuse();
2756      return cell;
2757    }
2758  }
2759
2760  // Return undefined if maps of the full prototype chain are still the
2761  // same and no global property with this name contains a value.
2762  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2763  __ Ret();
2764
2765  __ bind(&miss);
2766  GenerateLoadMiss(masm(), Code::LOAD_IC);
2767
2768  // Return the generated code.
2769  return GetCode(NONEXISTENT, heap()->empty_string());
2770}
2771
2772
2773MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2774                                                JSObject* holder,
2775                                                int index,
2776                                                String* name) {
2777  // ----------- S t a t e -------------
2778  //  -- r0    : receiver
2779  //  -- r2    : name
2780  //  -- lr    : return address
2781  // -----------------------------------
2782  Label miss;
2783
2784  GenerateLoadField(object, holder, r0, r3, r1, r4, index, name, &miss);
2785  __ bind(&miss);
2786  GenerateLoadMiss(masm(), Code::LOAD_IC);
2787
2788  // Return the generated code.
2789  return GetCode(FIELD, name);
2790}
2791
2792
2793MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2794                                                   JSObject* object,
2795                                                   JSObject* holder,
2796                                                   AccessorInfo* callback) {
2797  // ----------- S t a t e -------------
2798  //  -- r0    : receiver
2799  //  -- r2    : name
2800  //  -- lr    : return address
2801  // -----------------------------------
2802  Label miss;
2803
2804  MaybeObject* result = GenerateLoadCallback(object, holder, r0, r2, r3, r1, r4,
2805                                             callback, name, &miss);
2806  if (result->IsFailure()) {
2807    miss.Unuse();
2808    return result;
2809  }
2810
2811  __ bind(&miss);
2812  GenerateLoadMiss(masm(), Code::LOAD_IC);
2813
2814  // Return the generated code.
2815  return GetCode(CALLBACKS, name);
2816}
2817
2818
2819MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2820                                                   JSObject* holder,
2821                                                   Object* value,
2822                                                   String* name) {
2823  // ----------- S t a t e -------------
2824  //  -- r0    : receiver
2825  //  -- r2    : name
2826  //  -- lr    : return address
2827  // -----------------------------------
2828  Label miss;
2829
2830  GenerateLoadConstant(object, holder, r0, r3, r1, r4, value, name, &miss);
2831  __ bind(&miss);
2832  GenerateLoadMiss(masm(), Code::LOAD_IC);
2833
2834  // Return the generated code.
2835  return GetCode(CONSTANT_FUNCTION, name);
2836}
2837
2838
2839MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
2840                                                      JSObject* holder,
2841                                                      String* name) {
2842  // ----------- S t a t e -------------
2843  //  -- r0    : receiver
2844  //  -- r2    : name
2845  //  -- lr    : return address
2846  // -----------------------------------
2847  Label miss;
2848
2849  LookupResult lookup;
2850  LookupPostInterceptor(holder, name, &lookup);
2851  GenerateLoadInterceptor(object,
2852                          holder,
2853                          &lookup,
2854                          r0,
2855                          r2,
2856                          r3,
2857                          r1,
2858                          r4,
2859                          name,
2860                          &miss);
2861  __ bind(&miss);
2862  GenerateLoadMiss(masm(), Code::LOAD_IC);
2863
2864  // Return the generated code.
2865  return GetCode(INTERCEPTOR, name);
2866}
2867
2868
2869MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2870                                                 GlobalObject* holder,
2871                                                 JSGlobalPropertyCell* cell,
2872                                                 String* name,
2873                                                 bool is_dont_delete) {
2874  // ----------- S t a t e -------------
2875  //  -- r0    : receiver
2876  //  -- r2    : name
2877  //  -- lr    : return address
2878  // -----------------------------------
2879  Label miss;
2880
2881  // If the object is the holder then we know that it's a global
2882  // object, which can only happen for contextual calls. In this case,
2883  // the receiver cannot be a smi.
2884  if (object != holder) {
2885    __ JumpIfSmi(r0, &miss);
2886  }
2887
2888  // Check that the map of the global has not changed.
2889  CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
2890
2891  // Get the value from the cell.
2892  __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell)));
2893  __ ldr(r4, FieldMemOperand(r3, JSGlobalPropertyCell::kValueOffset));
2894
2895  // Check for a deleted property if the property can actually be deleted.
2896  if (!is_dont_delete) {
2897    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
2898    __ cmp(r4, ip);
2899    __ b(eq, &miss);
2900  }
2901
2902  __ mov(r0, r4);
2903  Counters* counters = masm()->isolate()->counters();
2904  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
2905  __ Ret();
2906
2907  __ bind(&miss);
2908  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
2909  GenerateLoadMiss(masm(), Code::LOAD_IC);
2910
2911  // Return the generated code.
2912  return GetCode(NORMAL, name);
2913}
2914
2915
2916MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2917                                                     JSObject* receiver,
2918                                                     JSObject* holder,
2919                                                     int index) {
2920  // ----------- S t a t e -------------
2921  //  -- lr    : return address
2922  //  -- r0    : key
2923  //  -- r1    : receiver
2924  // -----------------------------------
2925  Label miss;
2926
2927  // Check the key is the cached one.
2928  __ cmp(r0, Operand(Handle<String>(name)));
2929  __ b(ne, &miss);
2930
2931  GenerateLoadField(receiver, holder, r1, r2, r3, r4, index, name, &miss);
2932  __ bind(&miss);
2933  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2934
2935  return GetCode(FIELD, name);
2936}
2937
2938
2939MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2940    String* name,
2941    JSObject* receiver,
2942    JSObject* holder,
2943    AccessorInfo* callback) {
2944  // ----------- S t a t e -------------
2945  //  -- lr    : return address
2946  //  -- r0    : key
2947  //  -- r1    : receiver
2948  // -----------------------------------
2949  Label miss;
2950
2951  // Check the key is the cached one.
2952  __ cmp(r0, Operand(Handle<String>(name)));
2953  __ b(ne, &miss);
2954
2955  MaybeObject* result = GenerateLoadCallback(receiver, holder, r1, r0, r2, r3,
2956                                             r4, callback, name, &miss);
2957  if (result->IsFailure()) {
2958    miss.Unuse();
2959    return result;
2960  }
2961
2962  __ bind(&miss);
2963  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2964
2965  return GetCode(CALLBACKS, name);
2966}
2967
2968
2969MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2970                                                        JSObject* receiver,
2971                                                        JSObject* holder,
2972                                                        Object* value) {
2973  // ----------- S t a t e -------------
2974  //  -- lr    : return address
2975  //  -- r0    : key
2976  //  -- r1    : receiver
2977  // -----------------------------------
2978  Label miss;
2979
2980  // Check the key is the cached one.
2981  __ cmp(r0, Operand(Handle<String>(name)));
2982  __ b(ne, &miss);
2983
2984  GenerateLoadConstant(receiver, holder, r1, r2, r3, r4, value, name, &miss);
2985  __ bind(&miss);
2986  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2987
2988  // Return the generated code.
2989  return GetCode(CONSTANT_FUNCTION, name);
2990}
2991
2992
2993MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2994                                                           JSObject* holder,
2995                                                           String* name) {
2996  // ----------- S t a t e -------------
2997  //  -- lr    : return address
2998  //  -- r0    : key
2999  //  -- r1    : receiver
3000  // -----------------------------------
3001  Label miss;
3002
3003  // Check the key is the cached one.
3004  __ cmp(r0, Operand(Handle<String>(name)));
3005  __ b(ne, &miss);
3006
3007  LookupResult lookup;
3008  LookupPostInterceptor(holder, name, &lookup);
3009  GenerateLoadInterceptor(receiver,
3010                          holder,
3011                          &lookup,
3012                          r1,
3013                          r0,
3014                          r2,
3015                          r3,
3016                          r4,
3017                          name,
3018                          &miss);
3019  __ bind(&miss);
3020  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3021
3022  return GetCode(INTERCEPTOR, name);
3023}
3024
3025
3026MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
3027  // ----------- S t a t e -------------
3028  //  -- lr    : return address
3029  //  -- r0    : key
3030  //  -- r1    : receiver
3031  // -----------------------------------
3032  Label miss;
3033
3034  // Check the key is the cached one.
3035  __ cmp(r0, Operand(Handle<String>(name)));
3036  __ b(ne, &miss);
3037
3038  GenerateLoadArrayLength(masm(), r1, r2, &miss);
3039  __ bind(&miss);
3040  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3041
3042  return GetCode(CALLBACKS, name);
3043}
3044
3045
3046MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3047  // ----------- S t a t e -------------
3048  //  -- lr    : return address
3049  //  -- r0    : key
3050  //  -- r1    : receiver
3051  // -----------------------------------
3052  Label miss;
3053
3054  Counters* counters = masm()->isolate()->counters();
3055  __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3056
3057  // Check the key is the cached one.
3058  __ cmp(r0, Operand(Handle<String>(name)));
3059  __ b(ne, &miss);
3060
3061  GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
3062  __ bind(&miss);
3063  __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
3064
3065  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3066
3067  return GetCode(CALLBACKS, name);
3068}
3069
3070
3071MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3072  // ----------- S t a t e -------------
3073  //  -- lr    : return address
3074  //  -- r0    : key
3075  //  -- r1    : receiver
3076  // -----------------------------------
3077  Label miss;
3078
3079  Counters* counters = masm()->isolate()->counters();
3080  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3081
3082  // Check the name hasn't changed.
3083  __ cmp(r0, Operand(Handle<String>(name)));
3084  __ b(ne, &miss);
3085
3086  GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
3087  __ bind(&miss);
3088  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
3089  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3090
3091  return GetCode(CALLBACKS, name);
3092}
3093
3094
3095MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
3096  // ----------- S t a t e -------------
3097  //  -- lr    : return address
3098  //  -- r0    : key
3099  //  -- r1    : receiver
3100  // -----------------------------------
3101  Code* stub;
3102  ElementsKind elements_kind = receiver_map->elements_kind();
3103  MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
3104  if (!maybe_stub->To(&stub)) return maybe_stub;
3105  __ DispatchMap(r1,
3106                 r2,
3107                 Handle<Map>(receiver_map),
3108                 Handle<Code>(stub),
3109                 DO_SMI_CHECK);
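  // DispatchMap tail-jumps to the element-kind stub when the receiver's map
  // matches, and falls through (also for a smi receiver, given DO_SMI_CHECK)
  // to the miss handler below.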
3110
3111  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3112  __ Jump(ic, RelocInfo::CODE_TARGET);
3113
3114  // Return the generated code.
3115  return GetCode(NORMAL, NULL);
3116}
3117
3118
3119MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
3120    MapList* receiver_maps,
3121    CodeList* handler_ics) {
3122  // ----------- S t a t e -------------
3123  //  -- lr    : return address
3124  //  -- r0    : key
3125  //  -- r1    : receiver
3126  // -----------------------------------
3127  Label miss;
3128  __ JumpIfSmi(r1, &miss);
3129
3130  int receiver_count = receiver_maps->length();
3131  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
3132  for (int current = 0; current < receiver_count; ++current) {
3133    Handle<Map> map(receiver_maps->at(current));
3134    Handle<Code> code(handler_ics->at(current));
3135    __ mov(ip, Operand(map));
3136    __ cmp(r2, ip);
3137    __ Jump(code, RelocInfo::CODE_TARGET, eq);
3138  }
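  // Each receiver map recognized above dispatches directly to its handler;
  // any other receiver falls through to the megamorphic miss handler below.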
3139
3140  __ bind(&miss);
3141  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3142  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3143
3144  // Return the generated code.
3145  return GetCode(NORMAL, NULL, MEGAMORPHIC);
3146}
3147
3148
3149MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3150                                                       int index,
3151                                                       Map* transition,
3152                                                       String* name) {
3153  // ----------- S t a t e -------------
3154  //  -- r0    : value
3155  //  -- r1    : name
3156  //  -- r2    : receiver
3157  //  -- lr    : return address
3158  // -----------------------------------
3159  Label miss;
3160
3161  Counters* counters = masm()->isolate()->counters();
3162  __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
3163
3164  // Check that the name has not changed.
3165  __ cmp(r1, Operand(Handle<String>(name)));
3166  __ b(ne, &miss);
3167
3168  // r3 is used as a scratch register. r1 and r2 keep their values if a
3169  // jump to the miss label is generated.
3170  GenerateStoreField(masm(),
3171                     object,
3172                     index,
3173                     transition,
3174                     r2, r1, r3,
3175                     &miss);
3176  __ bind(&miss);
3177
3178  __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
3179  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3180  __ Jump(ic, RelocInfo::CODE_TARGET);
3181
3182  // Return the generated code.
3183  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
3184}
3185
3186
3187MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
3188  // ----------- S t a t e -------------
3189  //  -- r0    : value
3190  //  -- r1    : key
3191  //  -- r2    : receiver
3192  //  -- lr    : return address
3193  //  -- r3    : scratch
3194  // -----------------------------------
3195  Code* stub;
3196  ElementsKind elements_kind = receiver_map->elements_kind();
3197  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3198  MaybeObject* maybe_stub =
3199      KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
3200  if (!maybe_stub->To(&stub)) return maybe_stub;
3201  __ DispatchMap(r2,
3202                 r3,
3203                 Handle<Map>(receiver_map),
3204                 Handle<Code>(stub),
3205                 DO_SMI_CHECK);
3206
3207  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3208  __ Jump(ic, RelocInfo::CODE_TARGET);
3209
3210  // Return the generated code.
3211  return GetCode(NORMAL, NULL);
3212}
3213
3214
3215MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
3216    MapList* receiver_maps,
3217    CodeList* handler_ics) {
3218  // ----------- S t a t e -------------
3219  //  -- r0    : value
3220  //  -- r1    : key
3221  //  -- r2    : receiver
3222  //  -- lr    : return address
3223  //  -- r3    : scratch
3224  // -----------------------------------
3225  Label miss;
3226  __ JumpIfSmi(r2, &miss);
3227
3228  int receiver_count = receiver_maps->length();
3229  __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
3230  for (int current = 0; current < receiver_count; ++current) {
3231    Handle<Map> map(receiver_maps->at(current));
3232    Handle<Code> code(handler_ics->at(current));
3233    __ mov(ip, Operand(map));
3234    __ cmp(r3, ip);
3235    __ Jump(code, RelocInfo::CODE_TARGET, eq);
3236  }
3237
3238  __ bind(&miss);
3239  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3240  __ Jump(miss_ic, RelocInfo::CODE_TARGET, al);
3241
3242  // Return the generated code.
3243  return GetCode(NORMAL, NULL, MEGAMORPHIC);
3244}
3245
3246
3247MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3248  // ----------- S t a t e -------------
3249  //  -- r0    : argc
3250  //  -- r1    : constructor
3251  //  -- lr    : return address
3252  //  -- [sp]  : last argument
3253  // -----------------------------------
3254  Label generic_stub_call;
3255
3256  // Use r7 for holding undefined which is used in several places below.
3257  __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
3258
3259#ifdef ENABLE_DEBUGGER_SUPPORT
3260  // Check to see whether there are any break points in the function code. If
3261  // there are, jump to the generic constructor stub, which calls the actual
3262  // code for the function and thereby hits the break points.
3263  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
3264  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
3265  __ cmp(r2, r7);
3266  __ b(ne, &generic_stub_call);
3267#endif
3268
3269  // Load the initial map and verify that it is in fact a map.
3270  // r1: constructor function
3271  // r7: undefined
3272  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
3273  __ JumpIfSmi(r2, &generic_stub_call);
3274  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
3275  __ b(ne, &generic_stub_call);
3276
3277#ifdef DEBUG
3278  // Cannot construct functions this way.
3279  // r0: argc
3280  // r1: constructor function
3281  // r2: initial map
3282  // r7: undefined
3283  __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
3284  __ Check(ne, "Function constructed by construct stub.");
3285#endif
3286
3287  // Now allocate the JSObject in new space.
3288  // r0: argc
3289  // r1: constructor function
3290  // r2: initial map
3291  // r7: undefined
3292  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
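  // The instance size is stored in the map as a single byte and is measured
  // in words, which is why a byte load suffices here and the allocation
  // below uses SIZE_IN_WORDS.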
3293  __ AllocateInNewSpace(r3,
3294                        r4,
3295                        r5,
3296                        r6,
3297                        &generic_stub_call,
3298                        SIZE_IN_WORDS);
3299
3300  // The JSObject is allocated; now initialize the fields. The map is set to
3301  // the initial map, properties and elements to the empty fixed array.
3302  // r0: argc
3303  // r1: constructor function
3304  // r2: initial map
3305  // r3: object size (in words)
3306  // r4: JSObject (not tagged)
3307  // r7: undefined
3308  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
3309  __ mov(r5, r4);
3310  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3311  __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3312  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3313  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3314  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3315  __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
3316
3317  // Calculate the location of the first argument. The stack contains only the
3318  // argc arguments.
3319  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
3320
3321  // Fill all the in-object properties with undefined.
3322  // r0: argc
3323  // r1: first argument
3324  // r3: object size (in words)
3325  // r4: JSObject (not tagged)
3326  // r5: First in-object property of JSObject (not tagged)
3327  // r7: undefined
3328  // Fill the initialized properties with a constant value or a passed argument
3329  // depending on the this.x = ...; assignment in the function.
3330  SharedFunctionInfo* shared = function->shared();
3331  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3332    if (shared->IsThisPropertyAssignmentArgument(i)) {
3333      Label not_passed, next;
3334      // Check if the argument assigned to the property is actually passed.
3335      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3336      __ cmp(r0, Operand(arg_number));
3337      __ b(le, &not_passed);
3338      // Argument passed - find it on the stack.
3339      __ ldr(r2, MemOperand(r1, (arg_number + 1) * -kPointerSize));
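      // r1 points one word past the first argument and [sp] holds the last
      // one, so argument arg_number is at r1 - (arg_number + 1) * kPointerSize.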
3340      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3341      __ b(&next);
3342      __ bind(&not_passed);
3343      // Set the property to undefined.
3344      __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3345      __ bind(&next);
3346    } else {
3347      // Set the property to the constant value.
3348      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3349      __ mov(r2, Operand(constant));
3350      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
3351    }
3352  }
3353
3354  // Fill the unused in-object property fields with undefined.
3355  ASSERT(function->has_initial_map());
3356  for (int i = shared->this_property_assignments_count();
3357       i < function->initial_map()->inobject_properties();
3358       i++) {
3359    __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
3360  }
3361
3362  // r0: argc
3363  // r4: JSObject (not tagged)
3364  // Move argc to r1 and the JSObject to return to r0 and tag it.
3365  __ mov(r1, r0);
3366  __ mov(r0, r4);
3367  __ orr(r0, r0, Operand(kHeapObjectTag));
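  // Setting the low bit (kHeapObjectTag) turns the raw allocation address
  // into a tagged HeapObject pointer.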
3368
3369  // r0: JSObject
3370  // r1: argc
3371  // Remove caller arguments and receiver from the stack and return.
3372  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
3373  __ add(sp, sp, Operand(kPointerSize));
3374  Counters* counters = masm()->isolate()->counters();
3375  __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
3376  __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
3377  __ Jump(lr);
3378
3379  // Jump to the generic stub in case the specialized code cannot handle the
3380  // construction.
3381  __ bind(&generic_stub_call);
3382  Handle<Code> code = masm()->isolate()->builtins()->JSConstructStubGeneric();
3383  __ Jump(code, RelocInfo::CODE_TARGET);
3384
3385  // Return the generated code.
3386  return GetCode();
3387}
3388
3389
3390#undef __
3391#define __ ACCESS_MASM(masm)
3392
3393
3394void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3395    MacroAssembler* masm) {
3396  // ---------- S t a t e --------------
3397  //  -- lr     : return address
3398  //  -- r0     : key
3399  //  -- r1     : receiver
3400  // -----------------------------------
3401  Label slow, miss_force_generic;
3402
3403  Register key = r0;
3404  Register receiver = r1;
3405
3406  __ JumpIfNotSmi(key, &miss_force_generic);
3407  __ mov(r2, Operand(key, ASR, kSmiTagSize));
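  // Untag the smi key: with a one-bit, all-zero smi tag the arithmetic
  // shift right turns the tagged value 2 * index back into index.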
3408  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
3409  __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
3410  __ Ret();
3411
3412  __ bind(&slow);
3413  __ IncrementCounter(
3414      masm->isolate()->counters()->keyed_load_external_array_slow(),
3415      1, r2, r3);
3416
3417  // ---------- S t a t e --------------
3418  //  -- lr     : return address
3419  //  -- r0     : key
3420  //  -- r1     : receiver
3421  // -----------------------------------
3422  Handle<Code> slow_ic =
3423      masm->isolate()->builtins()->KeyedLoadIC_Slow();
3424  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3425
3426  // Miss case, call the runtime.
3427  __ bind(&miss_force_generic);
3428
3429  // ---------- S t a t e --------------
3430  //  -- lr     : return address
3431  //  -- r0     : key
3432  //  -- r1     : receiver
3433  // -----------------------------------
3434
3435  Handle<Code> miss_ic =
3436      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3437  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3438}
3439
3440
3441static bool IsElementTypeSigned(ElementsKind elements_kind) {
3442  switch (elements_kind) {
3443    case EXTERNAL_BYTE_ELEMENTS:
3444    case EXTERNAL_SHORT_ELEMENTS:
3445    case EXTERNAL_INT_ELEMENTS:
3446      return true;
3447
3448    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3449    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3450    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3451    case EXTERNAL_PIXEL_ELEMENTS:
3452      return false;
3453
3454    case EXTERNAL_FLOAT_ELEMENTS:
3455    case EXTERNAL_DOUBLE_ELEMENTS:
3456    case FAST_ELEMENTS:
3457    case FAST_DOUBLE_ELEMENTS:
3458    case DICTIONARY_ELEMENTS:
3459    case NON_STRICT_ARGUMENTS_ELEMENTS:
3460      UNREACHABLE();
3461      return false;
3462  }
3463  return false;
3464}
3465
3466
3467void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3468    MacroAssembler* masm,
3469    ElementsKind elements_kind) {
3470  // ---------- S t a t e --------------
3471  //  -- lr     : return address
3472  //  -- r0     : key
3473  //  -- r1     : receiver
3474  // -----------------------------------
3475  Label miss_force_generic, slow, failed_allocation;
3476
3477  Register key = r0;
3478  Register receiver = r1;
3479
3480  // This stub is meant to be tail-jumped to; the receiver must already
3481  // have been verified by the caller not to be a smi.
3482
3483  // Check that the key is a smi.
3484  __ JumpIfNotSmi(key, &miss_force_generic);
3485
3486  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3487  // r3: elements array
3488
3489  // Check that the index is in range.
3490  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3491  __ cmp(key, ip);
3492  // Unsigned comparison catches both negative and too-large values.
3493  __ b(hs, &miss_force_generic);
3494
3495  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3496  // r3: base pointer of external storage
3497
3498  // We do not untag the smi key; instead we work with it as if it were
3499  // premultiplied by 2.
3500  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
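  // Worked example: for element index 3 the smi key is 6, so byte-sized
  // accesses use (key LSR 1) == 3, two-byte accesses (key LSL 0) == 6,
  // four-byte accesses (key LSL 1) == 12 and eight-byte accesses
  // (key LSL 2) == 24: the index scaled by the element size in each case.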
3501
3502  Register value = r2;
3503  switch (elements_kind) {
3504    case EXTERNAL_BYTE_ELEMENTS:
3505      __ ldrsb(value, MemOperand(r3, key, LSR, 1));
3506      break;
3507    case EXTERNAL_PIXEL_ELEMENTS:
3508    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3509      __ ldrb(value, MemOperand(r3, key, LSR, 1));
3510      break;
3511    case EXTERNAL_SHORT_ELEMENTS:
3512      __ ldrsh(value, MemOperand(r3, key, LSL, 0));
3513      break;
3514    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3515      __ ldrh(value, MemOperand(r3, key, LSL, 0));
3516      break;
3517    case EXTERNAL_INT_ELEMENTS:
3518    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3519      __ ldr(value, MemOperand(r3, key, LSL, 1));
3520      break;
3521    case EXTERNAL_FLOAT_ELEMENTS:
3522      if (CpuFeatures::IsSupported(VFP3)) {
3523        CpuFeatures::Scope scope(VFP3);
3524        __ add(r2, r3, Operand(key, LSL, 1));
3525        __ vldr(s0, r2, 0);
3526      } else {
3527        __ ldr(value, MemOperand(r3, key, LSL, 1));
3528      }
3529      break;
3530    case EXTERNAL_DOUBLE_ELEMENTS:
3531      if (CpuFeatures::IsSupported(VFP3)) {
3532        CpuFeatures::Scope scope(VFP3);
3533        __ add(r2, r3, Operand(key, LSL, 2));
3534        __ vldr(d0, r2, 0);
3535      } else {
3536        __ add(r4, r3, Operand(key, LSL, 2));
3537        // r4: pointer to the beginning of the double we want to load.
3538        __ ldr(r2, MemOperand(r4, 0));
3539        __ ldr(r3, MemOperand(r4, Register::kSizeInBytes));
3540      }
3541      break;
3542    case FAST_ELEMENTS:
3543    case FAST_DOUBLE_ELEMENTS:
3544    case DICTIONARY_ELEMENTS:
3545    case NON_STRICT_ARGUMENTS_ELEMENTS:
3546      UNREACHABLE();
3547      break;
3548  }
3549
3550  // For integer array types:
3551  // r2: value
3552  // For float array type:
3553  // s0: value (if VFP3 is supported)
3554  // r2: value (if VFP3 is not supported)
3555  // For double array type:
3556  // d0: value (if VFP3 is supported)
3557  // r2/r3: value (if VFP3 is not supported)
3558
3559  if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3560    // For the Int and UnsignedInt array types, we need to see whether
3561    // the value can be represented in a Smi. If not, we need to convert
3562    // it to a HeapNumber.
3563    Label box_int;
3564    __ cmp(value, Operand(0xC0000000));
3565    __ b(mi, &box_int);
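    // The comparison computes value - 0xC0000000, i.e. value + 0x40000000
    // modulo 2^32, so the N flag is set exactly when value lies outside the
    // smi range [-2^30, 2^30). For example, 0x40000000 yields 0x80000000
    // (mi, boxed) while 0x3FFFFFFF yields 0x7FFFFFFF (pl, tagged as a smi).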
3566    // Tag integer as smi and return it.
3567    __ mov(r0, Operand(value, LSL, kSmiTagSize));
3568    __ Ret();
3569
3570    __ bind(&box_int);
3571    // Allocate a HeapNumber for the result and perform int-to-double
3572    // conversion.  Don't touch r0 or r1 as they are needed if allocation
3573    // fails.
3574    __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3575    __ AllocateHeapNumber(r5, r3, r4, r6, &slow);
3576    // Now we can use r0 for the result as key is not needed any more.
3577    __ mov(r0, r5);
3578
3579    if (CpuFeatures::IsSupported(VFP3)) {
3580      CpuFeatures::Scope scope(VFP3);
3581      __ vmov(s0, value);
3582      __ vcvt_f64_s32(d0, s0);
3583      __ sub(r3, r0, Operand(kHeapObjectTag));
3584      __ vstr(d0, r3, HeapNumber::kValueOffset);
3585      __ Ret();
3586    } else {
3587      Register dst1 = r1;
3588      Register dst2 = r3;
3589      FloatingPointHelper::Destination dest =
3590          FloatingPointHelper::kCoreRegisters;
3591      FloatingPointHelper::ConvertIntToDouble(masm,
3592                                              value,
3593                                              dest,
3594                                              d0,
3595                                              dst1,
3596                                              dst2,
3597                                              r9,
3598                                              s0);
3599      __ str(dst1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
3600      __ str(dst2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
3601      __ Ret();
3602    }
3603  } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3604    // The test is different for unsigned int values. Since we need
3605    // the value to be in the range of a positive smi, we can't
3606    // handle either of the top two bits being set in the value.
3607    if (CpuFeatures::IsSupported(VFP3)) {
3608      CpuFeatures::Scope scope(VFP3);
3609      Label box_int, done;
3610      __ tst(value, Operand(0xC0000000));
3611      __ b(ne, &box_int);
3612      // Tag integer as smi and return it.
3613      __ mov(r0, Operand(value, LSL, kSmiTagSize));
3614      __ Ret();
3615
3616      __ bind(&box_int);
3617      __ vmov(s0, value);
3618      // Allocate a HeapNumber for the result and perform int-to-double
3619      // conversion. Don't use r0 and r1 as AllocateHeapNumber clobbers all
3620      // registers - also when jumping due to exhausted young space.
3621      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3622      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3623
3624      __ vcvt_f64_u32(d0, s0);
3625      __ sub(r1, r2, Operand(kHeapObjectTag));
3626      __ vstr(d0, r1, HeapNumber::kValueOffset);
3627
3628      __ mov(r0, r2);
3629      __ Ret();
3630    } else {
3631      // Check whether unsigned integer fits into smi.
3632      Label box_int_0, box_int_1, done;
3633      __ tst(value, Operand(0x80000000));
3634      __ b(ne, &box_int_0);
3635      __ tst(value, Operand(0x40000000));
3636      __ b(ne, &box_int_1);
3637      // Tag integer as smi and return it.
3638      __ mov(r0, Operand(value, LSL, kSmiTagSize));
3639      __ Ret();
3640
3641      Register hiword = value;  // r2.
3642      Register loword = r3;
3643
3644      __ bind(&box_int_0);
3645      // Integer does not have leading zeros.
3646      GenerateUInt2Double(masm, hiword, loword, r4, 0);
3647      __ b(&done);
3648
3649      __ bind(&box_int_1);
3650      // Integer has one leading zero.
3651      GenerateUInt2Double(masm, hiword, loword, r4, 1);
3652
3653
3655      // Integer was converted to double in registers hiword:loword.
3656      // Wrap it into a HeapNumber. Don't use r0 and r1 as AllocateHeapNumber
3657      // clobbers all registers - also when jumping due to exhausted young
3658      // space.
3659      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3660      __ AllocateHeapNumber(r4, r5, r7, r6, &slow);
3661
3662      __ str(hiword, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3663      __ str(loword, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3664
3665      __ mov(r0, r4);
3666      __ Ret();
3667    }
3668  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3669    // For the floating-point array type, we need to always allocate a
3670    // HeapNumber.
3671    if (CpuFeatures::IsSupported(VFP3)) {
3672      CpuFeatures::Scope scope(VFP3);
3673      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3674      // AllocateHeapNumber clobbers all registers - also when jumping due to
3675      // exhausted young space.
3676      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3677      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3678      __ vcvt_f64_f32(d0, s0);
3679      __ sub(r1, r2, Operand(kHeapObjectTag));
3680      __ vstr(d0, r1, HeapNumber::kValueOffset);
3681
3682      __ mov(r0, r2);
3683      __ Ret();
3684    } else {
3685      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3686      // AllocateHeapNumber clobbers all registers - also when jumping due to
3687      // exhausted young space.
3688      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3689      __ AllocateHeapNumber(r3, r4, r5, r6, &slow);
3690      // VFP is not available; do a manual single-to-double conversion.
3691
3692      // r2: floating point value (binary32)
3693      // r3: heap number for result
3694
3695      // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to
3696      // the slow case from here.
3697      __ and_(r0, value, Operand(kBinary32MantissaMask));
3698
3699      // Extract exponent to r1. OK to clobber r1 now as there are no jumps to
3700      // the slow case from here.
3701      __ mov(r1, Operand(value, LSR, kBinary32MantissaBits));
3702      __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3703
3704      Label exponent_rebiased;
3705      __ teq(r1, Operand(0x00));
3706      __ b(eq, &exponent_rebiased);
3707
3708      __ teq(r1, Operand(0xff));
3709      __ mov(r1, Operand(0x7ff), LeaveCC, eq);
3710      __ b(eq, &exponent_rebiased);
3711
3712      // Rebias exponent.
3713      __ add(r1,
3714             r1,
3715             Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
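      // binary32 uses an exponent bias of 127 and binary64 a bias of 1023,
      // so rebiasing adds 1023 - 127 = 896 to the unpacked exponent.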
3716
3717      __ bind(&exponent_rebiased);
3718      __ and_(r2, value, Operand(kBinary32SignMask));
3719      value = no_reg;
3720      __ orr(r2, r2, Operand(r1, LSL, HeapNumber::kMantissaBitsInTopWord));
3721
3722      // Shift mantissa.
3723      static const int kMantissaShiftForHiWord =
3724          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3725
3726      static const int kMantissaShiftForLoWord =
3727          kBitsPerInt - kMantissaShiftForHiWord;
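      // With 23 mantissa bits in a binary32 and 20 mantissa bits in the top
      // word of a binary64, kMantissaShiftForHiWord is 3 and
      // kMantissaShiftForLoWord is 32 - 3 = 29: the top 20 mantissa bits end
      // up in the high word and the remaining 3 at the top of the low word.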
3728
3729      __ orr(r2, r2, Operand(r0, LSR, kMantissaShiftForHiWord));
3730      __ mov(r0, Operand(r0, LSL, kMantissaShiftForLoWord));
3731
3732      __ str(r2, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3733      __ str(r0, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3734
3735      __ mov(r0, r3);
3736      __ Ret();
3737    }
3738  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3739    if (CpuFeatures::IsSupported(VFP3)) {
3740      CpuFeatures::Scope scope(VFP3);
3741      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3742      // AllocateHeapNumber clobbers all registers - also when jumping due to
3743      // exhausted young space.
3744      __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
3745      __ AllocateHeapNumber(r2, r3, r4, r6, &slow);
3746      __ sub(r1, r2, Operand(kHeapObjectTag));
3747      __ vstr(d0, r1, HeapNumber::kValueOffset);
3748
3749      __ mov(r0, r2);
3750      __ Ret();
3751    } else {
3752      // Allocate a HeapNumber for the result. Don't use r0 and r1 as
3753      // AllocateHeapNumber clobbers all registers - also when jumping due to
3754      // exhausted young space.
3755      __ LoadRoot(r7, Heap::kHeapNumberMapRootIndex);
3756      __ AllocateHeapNumber(r4, r5, r6, r7, &slow);
3757
3758      __ str(r2, FieldMemOperand(r4, HeapNumber::kMantissaOffset));
3759      __ str(r3, FieldMemOperand(r4, HeapNumber::kExponentOffset));
3760      __ mov(r0, r4);
3761      __ Ret();
3762    }
3763
3764  } else {
3765    // Tag integer as smi and return it.
3766    __ mov(r0, Operand(value, LSL, kSmiTagSize));
3767    __ Ret();
3768  }
3769
3770  // Slow case, key and receiver still in r0 and r1.
3771  __ bind(&slow);
3772  __ IncrementCounter(
3773      masm->isolate()->counters()->keyed_load_external_array_slow(),
3774      1, r2, r3);
3775
3776  // ---------- S t a t e --------------
3777  //  -- lr     : return address
3778  //  -- r0     : key
3779  //  -- r1     : receiver
3780  // -----------------------------------
3781
3782  __ Push(r1, r0);
3783
3784  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3785
3786  __ bind(&miss_force_generic);
3787  Handle<Code> miss_ic =
3788      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3789  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3790}
3791
3792
3793void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3794    MacroAssembler* masm,
3795    ElementsKind elements_kind) {
3796  // ---------- S t a t e --------------
3797  //  -- r0     : value
3798  //  -- r1     : key
3799  //  -- r2     : receiver
3800  //  -- lr     : return address
3801  // -----------------------------------
3802  Label slow, check_heap_number, miss_force_generic;
3803
3804  // Register usage.
3805  Register value = r0;
3806  Register key = r1;
3807  Register receiver = r2;
3808  // r3 mostly holds the elements array or the destination external array.
3809
3810  // This stub is meant to be tail-jumped to; the receiver must already
3811  // have been verified by the caller not to be a smi.
3812
3813  // Check that the key is a smi.
3814  __ JumpIfNotSmi(key, &miss_force_generic);
3815
3816  __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3817
3818  // Check that the index is in range.
3819  __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
3820  __ cmp(key, ip);
3821  // Unsigned comparison catches both negative and too-large values.
3822  __ b(hs, &miss_force_generic);
3823
3824  // Handle both smis and HeapNumbers in the fast path. Go to the
3825  // runtime for all other kinds of values.
3826  // r3: external array.
3827  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3828    // Double to pixel conversion is only implemented in the runtime for now.
3829    __ JumpIfNotSmi(value, &slow);
3830  } else {
3831    __ JumpIfNotSmi(value, &check_heap_number);
3832  }
3833  __ SmiUntag(r5, value);
3834  __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3835
3836  // r3: base pointer of external storage.
3837  // r5: value (integer).
3838  switch (elements_kind) {
3839    case EXTERNAL_PIXEL_ELEMENTS:
3840      // Clamp the value to [0..255].
3841      __ Usat(r5, 8, Operand(r5));
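      // USAT saturates the signed input to the unsigned 8-bit range:
      // negative values become 0 and values above 255 become 255, which is
      // exactly the clamping behavior pixel stores require.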
3842      __ strb(r5, MemOperand(r3, key, LSR, 1));
3843      break;
3844    case EXTERNAL_BYTE_ELEMENTS:
3845    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3846      __ strb(r5, MemOperand(r3, key, LSR, 1));
3847      break;
3848    case EXTERNAL_SHORT_ELEMENTS:
3849    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3850      __ strh(r5, MemOperand(r3, key, LSL, 0));
3851      break;
3852    case EXTERNAL_INT_ELEMENTS:
3853    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3854      __ str(r5, MemOperand(r3, key, LSL, 1));
3855      break;
3856    case EXTERNAL_FLOAT_ELEMENTS:
3857      // Perform int-to-float conversion and store to memory.
3858      __ SmiUntag(r4, key);
3859      StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
3860      break;
3861    case EXTERNAL_DOUBLE_ELEMENTS:
3862      __ add(r3, r3, Operand(key, LSL, 2));
3863      // r3: effective address of the double element
3864      FloatingPointHelper::Destination destination;
3865      if (CpuFeatures::IsSupported(VFP3)) {
3866        destination = FloatingPointHelper::kVFPRegisters;
3867      } else {
3868        destination = FloatingPointHelper::kCoreRegisters;
3869      }
3870      FloatingPointHelper::ConvertIntToDouble(
3871          masm, r5, destination,
3872          d0, r6, r7,  // These are: double_dst, dst1, dst2.
3873          r4, s2);  // These are: scratch2, single_scratch.
3874      if (destination == FloatingPointHelper::kVFPRegisters) {
3875        CpuFeatures::Scope scope(VFP3);
3876        __ vstr(d0, r3, 0);
3877      } else {
3878        __ str(r6, MemOperand(r3, 0));
3879        __ str(r7, MemOperand(r3, Register::kSizeInBytes));
3880      }
3881      break;
3882    case FAST_ELEMENTS:
3883    case FAST_DOUBLE_ELEMENTS:
3884    case DICTIONARY_ELEMENTS:
3885    case NON_STRICT_ARGUMENTS_ELEMENTS:
3886      UNREACHABLE();
3887      break;
3888  }
3889
3890  // Entry registers are intact, r0 holds the value which is the return value.
3891  __ Ret();
3892
3893  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3894    // r3: external array.
3895    __ bind(&check_heap_number);
3896    __ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
3897    __ b(ne, &slow);
3898
3899    __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
3900
3901    // r3: base pointer of external storage.
3902
3903    // The WebGL specification leaves the behavior of storing NaN and
3904    // +/-Infinity into integer arrays basically undefined. For more
3905    // reproducible behavior, convert these to zero.
3906    if (CpuFeatures::IsSupported(VFP3)) {
3907      CpuFeatures::Scope scope(VFP3);
3908
3909      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3910        // vldr requires the offset to be a multiple of 4, so we cannot
3911        // fold -kHeapObjectTag into it.
3912        __ sub(r5, r0, Operand(kHeapObjectTag));
3913        __ vldr(d0, r5, HeapNumber::kValueOffset);
3914        __ add(r5, r3, Operand(key, LSL, 1));
3915        __ vcvt_f32_f64(s0, d0);
3916        __ vstr(s0, r5, 0);
3917      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3918        __ sub(r5, r0, Operand(kHeapObjectTag));
3919        __ vldr(d0, r5, HeapNumber::kValueOffset);
3920        __ add(r5, r3, Operand(key, LSL, 2));
3921        __ vstr(d0, r5, 0);
3922      } else {
3923        // Hoisted load. vldr requires the offset to be a multiple of 4,
3924        // so we cannot fold -kHeapObjectTag into it.
3925        __ sub(r5, value, Operand(kHeapObjectTag));
3926        __ vldr(d0, r5, HeapNumber::kValueOffset);
3927        __ EmitECMATruncate(r5, d0, s2, r6, r7, r9);
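        // EmitECMATruncate implements the ECMA-262 ToInt32 conversion: the
        // double is truncated toward zero and reduced modulo 2^32, with NaN
        // and infinities becoming 0, before the store below.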
3928
3929        switch (elements_kind) {
3930          case EXTERNAL_BYTE_ELEMENTS:
3931          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3932            __ strb(r5, MemOperand(r3, key, LSR, 1));
3933            break;
3934          case EXTERNAL_SHORT_ELEMENTS:
3935          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3936            __ strh(r5, MemOperand(r3, key, LSL, 0));
3937            break;
3938          case EXTERNAL_INT_ELEMENTS:
3939          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3940            __ str(r5, MemOperand(r3, key, LSL, 1));
3941            break;
3942          case EXTERNAL_PIXEL_ELEMENTS:
3943          case EXTERNAL_FLOAT_ELEMENTS:
3944          case EXTERNAL_DOUBLE_ELEMENTS:
3945          case FAST_ELEMENTS:
3946          case FAST_DOUBLE_ELEMENTS:
3947          case DICTIONARY_ELEMENTS:
3948          case NON_STRICT_ARGUMENTS_ELEMENTS:
3949            UNREACHABLE();
3950            break;
3951        }
3952      }
3953
3954      // Entry registers are intact, r0 holds the value which is the return
3955      // value.
3956      __ Ret();
3957    } else {
3958      // VFP3 is not available, so do the conversions manually.
3959      __ ldr(r5, FieldMemOperand(value, HeapNumber::kExponentOffset));
3960      __ ldr(r6, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3961
3962      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3963        Label done, nan_or_infinity_or_zero;
3964        static const int kMantissaInHiWordShift =
3965            kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3966
3967        static const int kMantissaInLoWordShift =
3968            kBitsPerInt - kMantissaInHiWordShift;
3969
3970        // Test for all special exponent values: zeros, subnormal numbers,
3971        // NaNs and infinities; each of these gets special handling below.
3972        __ mov(r7, Operand(HeapNumber::kExponentMask));
3973        __ and_(r9, r5, Operand(r7), SetCC);
3974        __ b(eq, &nan_or_infinity_or_zero);
3975
3976        __ teq(r9, Operand(r7));
3977        __ mov(r9, Operand(kBinary32ExponentMask), LeaveCC, eq);
3978        __ b(eq, &nan_or_infinity_or_zero);
3979
3980        // Rebias exponent.
3981        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
3982        __ add(r9,
3983               r9,
3984               Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3985
3986        __ cmp(r9, Operand(kBinary32MaxExponent));
3987        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, gt);
3988        __ orr(r5, r5, Operand(kBinary32ExponentMask), LeaveCC, gt);
3989        __ b(gt, &done);
3990
3991        __ cmp(r9, Operand(kBinary32MinExponent));
3992        __ and_(r5, r5, Operand(HeapNumber::kSignMask), LeaveCC, lt);
3993        __ b(lt, &done);
3994
3995        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
3996        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
3997        __ orr(r7, r7, Operand(r5, LSL, kMantissaInHiWordShift));
3998        __ orr(r7, r7, Operand(r6, LSR, kMantissaInLoWordShift));
3999        __ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
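        // r5 now holds the assembled binary32 image: the sign bit, the
        // rebiased 8-bit exponent at bit 23 and the truncated 23-bit
        // mantissa.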
4000
4001        __ bind(&done);
4002        __ str(r5, MemOperand(r3, key, LSL, 1));
4003        // Entry registers are intact, r0 holds the value which is the return
4004        // value.
4005        __ Ret();
4006
4007        __ bind(&nan_or_infinity_or_zero);
4008        __ and_(r7, r5, Operand(HeapNumber::kSignMask));
4009        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4010        __ orr(r9, r9, r7);
4011        __ orr(r9, r9, Operand(r5, LSL, kMantissaInHiWordShift));
4012        __ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
4013        __ b(&done);
4014      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4015        __ add(r7, r3, Operand(key, LSL, 2));
4016        // r7: effective address of destination element.
4017        __ str(r6, MemOperand(r7, 0));
4018        __ str(r5, MemOperand(r7, Register::kSizeInBytes));
4019        __ Ret();
4020      } else {
4021        bool is_signed_type = IsElementTypeSigned(elements_kind);
4022        int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4023        int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4024
4025        Label done, sign;
4026
4027        // Test for all special exponent values: zeros, subnormal numbers, NaNs
4028        // and infinities. All these should be converted to 0.
4029        __ mov(r7, Operand(HeapNumber::kExponentMask));
4030        __ and_(r9, r5, Operand(r7), SetCC);
4031        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4032        __ b(eq, &done);
4033
4034        __ teq(r9, Operand(r7));
4035        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, eq);
4036        __ b(eq, &done);
4037
4038        // Unbias exponent.
4039        __ mov(r9, Operand(r9, LSR, HeapNumber::kExponentShift));
4040        __ sub(r9, r9, Operand(HeapNumber::kExponentBias), SetCC);
4041        // If the exponent is negative then the result is 0.
4042        __ mov(r5, Operand(0, RelocInfo::NONE), LeaveCC, mi);
4043        __ b(mi, &done);
4044
4045        // If the exponent is too big then the result is the minimal value.
4046        __ cmp(r9, Operand(meaningful_bits - 1));
4047        __ mov(r5, Operand(min_value), LeaveCC, ge);
4048        __ b(ge, &done);
4049
4050        __ and_(r7, r5, Operand(HeapNumber::kSignMask), SetCC);
4051        __ and_(r5, r5, Operand(HeapNumber::kMantissaMask));
4052        __ orr(r5, r5, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4053
4054        __ rsb(r9, r9, Operand(HeapNumber::kMantissaBitsInTopWord), SetCC);
4055        __ mov(r5, Operand(r5, LSR, r9), LeaveCC, pl);
4056        __ b(pl, &sign);
4057
4058        __ rsb(r9, r9, Operand(0, RelocInfo::NONE));
4059        __ mov(r5, Operand(r5, LSL, r9));
4060        __ rsb(r9, r9, Operand(meaningful_bits));
4061        __ orr(r5, r5, Operand(r6, LSR, r9));
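        // Both shift paths leave |value| truncated toward zero in r5: the
        // implicit leading one plus the mantissa bits, shifted by the
        // unbiased exponent, with low-word bits merged in when the exponent
        // exceeds kMantissaBitsInTopWord. The sign is applied below.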
4062
4063        __ bind(&sign);
4064        __ teq(r7, Operand(0, RelocInfo::NONE));
4065        __ rsb(r5, r5, Operand(0, RelocInfo::NONE), LeaveCC, ne);
4066
4067        __ bind(&done);
4068        switch (elements_kind) {
4069          case EXTERNAL_BYTE_ELEMENTS:
4070          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4071            __ strb(r5, MemOperand(r3, key, LSR, 1));
4072            break;
4073          case EXTERNAL_SHORT_ELEMENTS:
4074          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4075            __ strh(r5, MemOperand(r3, key, LSL, 0));
4076            break;
4077          case EXTERNAL_INT_ELEMENTS:
4078          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4079            __ str(r5, MemOperand(r3, key, LSL, 1));
4080            break;
4081          case EXTERNAL_PIXEL_ELEMENTS:
4082          case EXTERNAL_FLOAT_ELEMENTS:
4083          case EXTERNAL_DOUBLE_ELEMENTS:
4084          case FAST_ELEMENTS:
4085          case FAST_DOUBLE_ELEMENTS:
4086          case DICTIONARY_ELEMENTS:
4087          case NON_STRICT_ARGUMENTS_ELEMENTS:
4088            UNREACHABLE();
4089            break;
4090        }
4091      }
4092    }
4093  }
4094
4095  // Slow case, key and receiver still in r0 and r1.
4096  __ bind(&slow);
4097  __ IncrementCounter(
4098      masm->isolate()->counters()->keyed_load_external_array_slow(),
4099      1, r2, r3);
4100
4101  // ---------- S t a t e --------------
4102  //  -- lr     : return address
4103  //  -- r0     : key
4104  //  -- r1     : receiver
4105  // -----------------------------------
4106  Handle<Code> slow_ic =
4107      masm->isolate()->builtins()->KeyedStoreIC_Slow();
4108  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4109
4110  // Miss case, call the runtime.
4111  __ bind(&miss_force_generic);
4112
4113  // ---------- S t a t e --------------
4114  //  -- lr     : return address
4115  //  -- r0     : key
4116  //  -- r1     : receiver
4117  // -----------------------------------
4118
4119  Handle<Code> miss_ic =
4120      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4121  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4122}
4123
4124
4125void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4126  // ----------- S t a t e -------------
4127  //  -- lr    : return address
4128  //  -- r0    : key
4129  //  -- r1    : receiver
4130  // -----------------------------------
4131  Label miss_force_generic;
4132
4133  // This stub is meant to be tail-jumped to; the receiver must already
4134  // have been verified by the caller not to be a smi.
4135
4136  // Check that the key is a smi.
4137  __ JumpIfNotSmi(r0, &miss_force_generic);
4138
4139  // Get the elements array.
4140  __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
4141  __ AssertFastElements(r2);
4142
4143  // Check that the key is within bounds.
4144  __ ldr(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
4145  __ cmp(r0, Operand(r3));
4146  __ b(hs, &miss_force_generic);
4147
4148  // Load the result and make sure it's not the hole.
4149  __ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4150  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4151  __ ldr(r4,
4152         MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
4153  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
4154  __ cmp(r4, ip);
4155  __ b(eq, &miss_force_generic);
4156  __ mov(r0, r4);
4157  __ Ret();
4158
4159  __ bind(&miss_force_generic);
4160  Handle<Code> miss_ic =
4161      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4162  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4163}
4164
4165
4166void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
4167    MacroAssembler* masm) {
4168  // ----------- S t a t e -------------
4169  //  -- lr    : return address
4170  //  -- r0    : key
4171  //  -- r1    : receiver
4172  // -----------------------------------
4173  Label miss_force_generic, slow_allocate_heapnumber;
4174
4175  Register key_reg = r0;
4176  Register receiver_reg = r1;
4177  Register elements_reg = r2;
4178  Register heap_number_reg = r2;
4179  Register indexed_double_offset = r3;
4180  Register scratch = r4;
4181  Register scratch2 = r5;
4182  Register scratch3 = r6;
4183  Register heap_number_map = r7;
4184
4185  // This stub is meant to be tail-jumped to; the receiver must already
4186  // have been verified by the caller not to be a smi.
4187
4188  // Check that the key is a smi.
4189  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4190
4191  // Get the elements array.
4192  __ ldr(elements_reg,
4193         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4194
4195  // Check that the key is within bounds.
4196  __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4197  __ cmp(key_reg, Operand(scratch));
4198  __ b(hs, &miss_force_generic);
4199
4200  // Load the upper word of the double in the fixed array and test for NaN.
4201  __ add(indexed_double_offset, elements_reg,
4202         Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
4203  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4204  __ ldr(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4205  __ cmp(scratch, Operand(kHoleNanUpper32));
4206  __ b(eq, &miss_force_generic);
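  // The mantissa (low) word of each double is stored first, so the upper
  // sign/exponent word sits at kHeaderSize + sizeof(kHoleNanLower32); the
  // hole itself is a NaN whose upper word equals kHoleNanUpper32.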
4207
4208  // Non-NaN. Allocate a new heap number and copy the double value into it.
4209  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4210  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4211                        heap_number_map, &slow_allocate_heapnumber);
4212
4213  // There is no need to reload the upper 32 bits of the double; they are
4214  // already in scratch.
4215  __ str(scratch, FieldMemOperand(heap_number_reg,
4216                                  HeapNumber::kExponentOffset));
4217  __ ldr(scratch, FieldMemOperand(indexed_double_offset,
4218                                  FixedArray::kHeaderSize));
4219  __ str(scratch, FieldMemOperand(heap_number_reg,
4220                                  HeapNumber::kMantissaOffset));
4221
4222  __ mov(r0, heap_number_reg);
4223  __ Ret();
4224
4225  __ bind(&slow_allocate_heapnumber);
4226  Handle<Code> slow_ic =
4227      masm->isolate()->builtins()->KeyedLoadIC_Slow();
4228  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4229
4230  __ bind(&miss_force_generic);
4231  Handle<Code> miss_ic =
4232      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4233  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4234}
4235
4236
4237void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
4238                                                      bool is_js_array) {
4239  // ----------- S t a t e -------------
4240  //  -- r0    : value
4241  //  -- r1    : key
4242  //  -- r2    : receiver
4243  //  -- lr    : return address
4244  //  -- r3    : scratch
4245  //  -- r4    : scratch (elements)
4246  // -----------------------------------
4247  Label miss_force_generic;
4248
4249  Register value_reg = r0;
4250  Register key_reg = r1;
4251  Register receiver_reg = r2;
4252  Register scratch = r3;
4253  Register elements_reg = r4;
4254
4255  // This stub is meant to be tail-jumped to; the receiver must already
4256  // have been verified by the caller not to be a smi.
4257
4258  // Check that the key is a smi.
4259  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4260
4261  // Get the elements array; make sure it is a fast (non-COW) element array.
4262  __ ldr(elements_reg,
4263         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4264  __ CheckMap(elements_reg,
4265              scratch,
4266              Heap::kFixedArrayMapRootIndex,
4267              &miss_force_generic,
4268              DONT_DO_SMI_CHECK);
4269
4270  // Check that the key is within bounds.
4271  if (is_js_array) {
4272    __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4273  } else {
4274    __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4275  }
4276  // Compare smis.
4277  __ cmp(key_reg, scratch);
4278  __ b(hs, &miss_force_generic);
4279
4280  __ add(scratch,
4281         elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4282  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4283  __ str(value_reg,
4284         MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
4285  __ RecordWrite(scratch,
4286                 Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize),
4287                 receiver_reg, elements_reg);
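  // The write barrier records the stored pointer for the GC; it is needed
  // because the elements array may live in old space while the stored value
  // may be a new-space object.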
4288
4289  // value_reg (r0) is preserved.
4290  // Done.
4291  __ Ret();
4292
4293  __ bind(&miss_force_generic);
4294  Handle<Code> ic =
4295      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4296  __ Jump(ic, RelocInfo::CODE_TARGET);
4297}
4298
4299
4300void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
4301    MacroAssembler* masm,
4302    bool is_js_array) {
4303  // ----------- S t a t e -------------
4304  //  -- r0    : value
4305  //  -- r1    : key
4306  //  -- r2    : receiver
4307  //  -- lr    : return address
4308  //  -- r3    : scratch
4309  //  -- r4    : scratch
4310  //  -- r5    : scratch
4311  // -----------------------------------
4312  Label miss_force_generic, smi_value, is_nan, maybe_nan, have_double_value;
4313
4314  Register value_reg = r0;
4315  Register key_reg = r1;
4316  Register receiver_reg = r2;
4317  Register scratch = r3;
4318  Register elements_reg = r4;
4319  Register mantissa_reg = r5;
4320  Register exponent_reg = r6;
4321  Register scratch4 = r7;
4322
4323  // This stub is meant to be tail-jumped to; the receiver must already
4324  // have been verified by the caller not to be a smi.
4325  __ JumpIfNotSmi(key_reg, &miss_force_generic);

  __ ldr(elements_reg,
         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

  // Check that the key is within bounds.
  if (is_js_array) {
    __ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ ldr(scratch,
           FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis; the unsigned comparison catches both negative and
  // out-of-bound indexes.
  __ cmp(key_reg, scratch);
  __ b(hs, &miss_force_generic);
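  // Both key and length are non-negative smis with a zero tag bit, so
  // their tagged bit patterns order exactly like the untagged integers;
  // a negative key has its sign bit set and thus compares 'hs' against
  // any valid length.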

  // Handle smi values specially.
  __ JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  __ CheckMap(value_reg,
              scratch,
              masm->isolate()->factory()->heap_number_map(),
              &miss_force_generic,
              DONT_DO_SMI_CHECK);

  // Check for NaN: all NaN values have a value greater (signed) than
  // 0x7ff00000 in the exponent word.
  __ mov(scratch, Operand(kNaNOrInfinityLowerBoundUpper32));
  __ ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
  __ cmp(exponent_reg, scratch);
  __ b(ge, &maybe_nan);
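  // 0x7ff00000 is the upper word of +Infinity: all exponent bits set and
  // zero fraction bits in that word. An upper word signed-greater than it
  // carries a non-zero fraction and is a NaN outright; equality means
  // Infinity, or a NaN whose payload sits entirely in the lower word, and
  // the maybe_nan path below disambiguates by testing the mantissa word.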

  __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));

  __ bind(&have_double_value);
  __ add(scratch, elements_reg,
         Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
  __ str(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize));
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ str(exponent_reg, FieldMemOperand(scratch, offset));
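  // The two stores above write the double as separate 32-bit words: on
  // little-endian ARM the mantissa (low) word goes at the lower address
  // and the exponent (high) word sizeof(kHoleNanLower32) == 4 bytes above.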
  __ Ret();

  __ bind(&maybe_nan);
  // Could be NaN or Infinity. If the fraction is not zero, it's NaN;
  // otherwise it's Infinity, and the non-NaN code path applies.
  __ b(gt, &is_nan);
  __ ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
  __ cmp(mantissa_reg, Operand(0));
  __ b(eq, &have_double_value);
  __ bind(&is_nan);
  // Load canonical NaN for storing into the double array.
  uint64_t nan_int64 = BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double());
  __ mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64)));
  __ mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32)));
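  // The canonical NaN is split into its low and high 32-bit halves so it
  // can be stored through the same two-word path at have_double_value.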
  __ jmp(&have_double_value);

  __ bind(&smi_value);
  __ add(scratch, elements_reg,
         Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  __ add(scratch, scratch,
         Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
  // scratch now holds the effective (untagged) address of the double
  // element.

  FloatingPointHelper::Destination destination;
  if (CpuFeatures::IsSupported(VFP3)) {
    destination = FloatingPointHelper::kVFPRegisters;
  } else {
    destination = FloatingPointHelper::kCoreRegisters;
  }

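  // The receiver is dead from this point on, so its register can be
  // reused to hold the untagged integer value.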
  Register untagged_value = receiver_reg;
  __ SmiUntag(untagged_value, value_reg);
  FloatingPointHelper::ConvertIntToDouble(
      masm,
      untagged_value,
      destination,
      d0,
      mantissa_reg,
      exponent_reg,
      scratch4,
      s2);
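  // ConvertIntToDouble leaves the result in d0 when targeting VFP
  // registers, and in the mantissa_reg/exponent_reg pair otherwise.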
  if (destination == FloatingPointHelper::kVFPRegisters) {
    CpuFeatures::Scope scope(VFP3);
    __ vstr(d0, scratch, 0);
  } else {
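    // Without VFP3 the double arrives as two core-register words; store
    // them in little-endian order, mantissa word first.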
    __ str(mantissa_reg, MemOperand(scratch, 0));
    __ str(exponent_reg, MemOperand(scratch, Register::kSizeInBytes));
  }
  __ Ret();

  // Handle a store cache miss by replacing the IC with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM