// ic-arm.cc revision 257744e915dfc84d6d07a6b2accf8402d9ffc708
1// Copyright 2011 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_ARM)
31
32#include "assembler-arm.h"
33#include "code-stubs.h"
34#include "codegen.h"
35#include "disasm.h"
36#include "ic-inl.h"
37#include "runtime.h"
38#include "stub-cache.h"
39
40namespace v8 {
41namespace internal {
42
43
44// ----------------------------------------------------------------------------
45// Static IC stub generators.
46//
47
48#define __ ACCESS_MASM(masm)
49
50
// Jumps to |global_object| if |type| is one of the global object instance
// types (JS_GLOBAL_OBJECT_TYPE, JS_BUILTINS_OBJECT_TYPE or
// JS_GLOBAL_PROXY_TYPE); falls through otherwise.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry; not clobbered
  //         (only compared against constants).
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}
63
64
// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors; jumps to |miss|
// otherwise.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register t0,
                                                  Register t1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   t0: used to hold the receiver map.
  //   t1: used to hold the receiver instance type, receiver bit mask and
  //       elements map.

  // Check that the receiver isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the receiver is a valid JS object.
  __ CompareObjectType(receiver, t0, t1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Global-object receivers take the miss path.
  GenerateGlobalInstanceTypeCheck(masm, t1, miss);

  // Check that the global object does not require access checks.
  __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ b(ne, miss);

  // Check that the receiver has slow (dictionary) properties: the
  // properties array's map must be the hash table map.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(t1, ip);
  __ b(ne, miss);
}
106
107
// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. Can be the same as elements or name clobbering
//           one of these in the case of not jumping to the miss label.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  // The details word is a smi; the type field is shifted left by the smi
  // tag size. A non-zero type field means the property is not NORMAL.
  __ tst(scratch1, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}
157
158
// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry in the dictionary check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Mask covering both the property type field and the READ_ONLY attribute
  // bit, shifted to account for the smi tag on the details word.
  const int kTypeAndReadOnlyMask
      = (PropertyDetails::TypeField::mask() |
         PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
  __ b(ne, miss);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(elements, scratch2, scratch1);
}
214
215
// Probes a NumberDictionary (slow-case elements) for the given smi key and,
// on success, loads the corresponding value into |result|. Jumps to |miss|
// if the key is absent or the property is not a normal data property.
static void GenerateNumberDictionaryLoad(MacroAssembler* masm,
                                         Label* miss,
                                         Register elements,
                                         Register key,
                                         Register result,
                                         Register t0,
                                         Register t1,
                                         Register t2) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'key' or 'result'.
  //            Unchanged on bailout so 'key' or 'result' can be used
  //            in further computation.
  //
  // Scratch registers:
  //
  // t0 - holds the untagged key on entry and holds the hash once computed.
  //
  // t1 - used to hold the capacity mask of the dictionary
  //
  // t2 - used for the index into the dictionary.
  Label done;

  // Compute the hash code from the untagged key.  This must be kept in sync
  // with ComputeIntegerHash in utils.h.
  //
  // hash = ~hash + (hash << 15);
  __ mvn(t1, Operand(t0));
  __ add(t0, t1, Operand(t0, LSL, 15));
  // hash = hash ^ (hash >> 12);
  __ eor(t0, t0, Operand(t0, LSR, 12));
  // hash = hash + (hash << 2);
  __ add(t0, t0, Operand(t0, LSL, 2));
  // hash = hash ^ (hash >> 4);
  __ eor(t0, t0, Operand(t0, LSR, 4));
  // hash = hash * 2057;
  __ mov(t1, Operand(2057));
  __ mul(t0, t0, t1);
  // hash = hash ^ (hash >> 16);
  __ eor(t0, t0, Operand(t0, LSR, 16));

  // Compute the capacity mask.
  __ ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
  __ mov(t1, Operand(t1, ASR, kSmiTagSize));  // convert smi to int
  __ sub(t1, t1, Operand(1));

  // Generate an unrolled loop that performs a few probes before giving up.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use t2 for index calculations and keep the hash intact in t0.
    __ mov(t2, t0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      __ add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
    }
    __ and_(t2, t2, Operand(t1));

    // Scale the index by multiplying by the element size.
    ASSERT(NumberDictionary::kEntrySize == 3);
    __ add(t2, t2, Operand(t2, LSL, 1));  // t2 = t2 * 3

    // Check if the key is identical to the name.
    __ add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
    __ cmp(key, Operand(ip));
    if (i != kProbes - 1) {
      // Not the last probe: on a hit go check the details, otherwise try
      // the next probe.
      __ b(eq, &done);
    } else {
      // Last probe: anything but a hit means the key is absent.
      __ b(ne, miss);
    }
  }

  __ bind(&done);
  // Check that the value is a normal property.
  // t2: elements + (index * kPointerSize)
  const int kDetailsOffset =
      NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  __ ldr(t1, FieldMemOperand(t2, kDetailsOffset));
  __ tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::mask())));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  const int kValueOffset =
      NumberDictionary::kElementsStartOffset + kPointerSize;
  __ ldr(result, FieldMemOperand(t2, kValueOffset));
}
309
310
// Generates the LoadIC stub specialized for loading the length of a JS
// array; any non-matching receiver falls through to the generic miss stub.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  // r3 is used as a scratch register.
  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
324
325
// Generates the LoadIC stub specialized for loading the length of a string.
// If |support_wrappers| is true the generated code also handles JSValue
// wrappers around strings (see GenerateLoadStringLength).
void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  // r1 and r3 are used as scratch registers.
  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
                                         support_wrappers);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
341
342
// Generates the LoadIC stub specialized for loading the "prototype"
// property of a function; non-matching receivers fall through to the
// generic miss stub.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  // r1 and r3 are used as scratch registers.
  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
356
357
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object; jumps to |slow| otherwise.
// |map| receives the receiver's map; |scratch| is clobbered.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ b(ne, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
  __ b(lt, slow);
}
384
385
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch1));
  // Unsigned comparison: a negative key is also treated as out of range.
  __ b(hs, out_of_range);
  // Fast case: Do the load.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi.
  ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(scratch2,
         MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}
446
447
// Checks whether a key is an array index string or a symbol string.
// Falls through if a key is a symbol; jumps to |index_string| for an
// array-index string (with the cached index still in the hash field) and
// to |not_symbol| otherwise.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // The key is not a smi.
  // Is it a string?
  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
  __ b(ge, not_symbol);

  // Is the string an array index, with cached numeric value?
  __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
  __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
  __ b(eq, index_string);

  // Is the string a symbol?
  // map: key map
  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  ASSERT(kSymbolTag != 0);
  __ tst(hash, Operand(kIsSymbolMask));
  __ b(eq, not_symbol);
}
473
474
475// Defined in ic.cc.
476Object* CallIC_Miss(Arguments args);
477
// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                          int argc,
                                          Code::Kind kind,
                                          Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r1    : receiver
  //  -- r2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC,
                                         extra_ic_state,
                                         NORMAL,
                                         argc);
  // r3, r4 and r5 are scratch registers; a hit tail-calls the cached stub.
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  // Replace the receiver in r1 with the Number function's prototype.
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  // r3 still holds the instance type from the CompareObjectType above.
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);

  // Both probes missed: fall through to the caller's miss handling.
  __ bind(&miss);
}
542
543
// Tail-calls the value in r1 as a function with |argc| arguments.
// Jumps to |miss| if r1 is a smi or not a JSFunction. |scratch| is
// clobbered.
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
563
564
// Shared CallIC/KeyedCallIC code path for receivers with slow (dictionary)
// properties: looks the function up in the receiver's property dictionary
// and tail-calls it. Falls through on miss so the caller can emit the miss
// handling.
static void GenerateCallNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  // Bail out to &miss unless the receiver is a regular non-global JS
  // object with slow properties and no interceptors.
  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);

  // r0: elements
  // Search the dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);

  GenerateFunctionTailCall(masm, argc, &miss, r4);

  // Fall through on miss.
  __ bind(&miss);
}
585
586
// Shared miss handler for CallIC and KeyedCallIC: calls the IC utility
// identified by |id| in the runtime (which returns the function to call),
// patches a global receiver on the stack if necessary, and invokes the
// returned function.
static void GenerateCallMiss(MacroAssembler* masm,
                             int argc,
                             IC::UtilityId id,
                             Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ Push(r3, r2);

  // Call the entry.
  __ mov(r0, Operand(2));  // number of arguments to the C entry
  __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));

  CEntryStub stub(1);
  __ CallStub(&stub);

  // Move result to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
    __ tst(r2, Operand(kSmiTagMask));
    __ b(eq, &invoke);
    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
    __ b(eq, &global);
    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
    __ b(ne, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}
652
653
// CallIC miss stub: forwards to the shared GenerateCallMiss with the
// CallIC-specific utility id.
void CallIC::GenerateMiss(MacroAssembler* masm,
                          int argc,
                          Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
}
664
665
// CallIC megamorphic stub: probes the stub cache for the receiver/name
// pair and falls back to the miss stub if both probes fail.
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Falls through here only if the cache probe misses.
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}
679
680
// CallIC stub for receivers with slow (dictionary) properties; falls back
// to the miss stub when the dictionary lookup fails.
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // GenerateCallNormal falls through on miss.
  GenerateCallNormal(masm, argc);
  GenerateMiss(masm, argc, Code::kNoExtraICState);
}
690
691
// KeyedCallIC miss stub: forwards to the shared GenerateCallMiss with the
// KeyedCallIC-specific utility id (no extra IC state for keyed calls).
void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
}
700
701
// KeyedCallIC megamorphic stub: dispatches on the key type (smi index,
// numeric string, symbol) and loads the callee from fast elements, a
// number dictionary, a property dictionary or the stub cache before
// tail-calling it. Falls back to the miss stub for everything else.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  // Load the element at the smi index into r1 (clobbering the receiver).
  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // receiver in r1 is not used after this point.
  // r2: key
  // r1: function
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);

  __ bind(&check_number_dictionary);
  // r2: key
  // r3: elements map
  // r4: elements
  // Check whether the elements is a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow_load);
  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
  // r0: untagged index
  GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  __ EnterInternalFrame();
  __ push(r2);  // save the key
  __ Push(r1, r2);  // pass the receiver and the key
  __ CallRuntime(Runtime::kKeyedGetProperty, 2);
  __ pop(r2);  // restore the key
  __ LeaveInternalFrame();
  __ mov(r1, r0);  // loaded value becomes the callee
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  // Check that the receiver's properties array is a dictionary.
  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  // Extract the cached array index from the string's hash field into r2.
  __ IndexFromHash(r3, r2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
805
806
// KeyedCallIC stub for receivers with slow (dictionary) properties: only
// string names can use the dictionary fast path; everything else goes to
// the miss stub.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &miss);
  __ IsObjectJSStringType(r2, r0, &miss);

  // GenerateCallNormal falls through on miss.
  GenerateCallNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
823
824
825// Defined in ic.cc.
826Object* LoadIC_Miss(Arguments args);
827
// Megamorphic named load: probe the global stub cache for a monomorphic
// LOAD_IC stub specialized for the receiver's map and the name; if the
// probe does not find one, control falls through to the miss handler.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5);  // r3-r5 are scratch.

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
846
847
// Load of a named property from a receiver with dictionary (slow-mode)
// properties: probe the receiver's string dictionary inline and return
// the value; anything unexpected goes to the miss handler.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  // Verifies the receiver is a suitable dictionary-mode object and
  // loads its properties dictionary into r1.
  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);

  // r1: elements
  // Probe the dictionary for r2 (name); result ends up in r0.
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
867
868
// Miss handler for named loads: bumps the load_miss counter, pushes
// the (receiver, name) pair and tail-calls the LoadIC_Miss runtime
// entry, which performs the lookup and may rewrite the IC.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  // Push receiver and name as the two runtime-call arguments.
  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}
888
889
// Miss handler for keyed loads.  If force_generic is set the runtime is
// told to go straight to the generic stub instead of trying to install
// a more specialized one.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  // Push receiver and key as the two runtime-call arguments.
  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}
909
910
// Slow path for keyed loads that delegates the whole property lookup to
// the Runtime::kKeyedGetProperty function without recording an IC miss.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  // Push receiver and key as the two runtime-call arguments.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
922
923
// Generic keyed load stub.  Dispatches on the kind of key:
//  - smi keys: fast elements load, falling back to a number dictionary;
//  - string keys that are array indices: converted to smi and retried;
//  - symbol keys: keyed lookup cache, then receiver's property
//    dictionary, then the runtime.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the "has fast elements" bit in the receiver's map which is
  // now in r2.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitField2Offset));
  __ tst(r3, Operand(1 << Map::kHasFastElements));
  __ b(eq, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));

  // Check whether the elements is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  // Untag the smi key for the dictionary probe.
  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
  GenerateNumberDictionaryLoad(masm, &slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
  // Mix map bits and string-hash bits, then mask down to a cache index.
  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
  __ And(r3, r3, Operand(KeyedLookupCache::kCapacityMask));

  // Load the key (consisting of map and symbol) from the cache and
  // check for match.
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);
  __ mov(r4, Operand(cache_keys));
  // Each cache entry is a (map, symbol) pair: index * 2 pointers.
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));  // Move r4 to symbol.
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r0     : key
  // r1     : receiver
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);
  __ mov(r4, Operand(cache_field_offsets));
  __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
  __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
  // Offsets below the in-object property count are in-object; the rest
  // index into the separate property backing-store array.
  __ sub(r5, r5, r6, SetCC);
  __ b(ge, &property_array_property);

  // Load in-object property.
  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r1: receiver
  // r0: key
  // r3: elements
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  // Global objects need the full lookup in the runtime.
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1, r2, r3);
  __ Ret();

  __ bind(&index_string);
  // The string key was a cached array index: recover the smi index.
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
1065
1066
// Specialized keyed load for string receivers: returns the one-character
// string at the given index (string.charAt semantics for indexed access).
// Any deviation (non-string receiver, non-number key, out-of-range index)
// goes to the generic miss handler.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key (index)
  //  -- r1     : receiver
  // -----------------------------------
  Label miss;

  Register receiver = r1;
  Register index = r0;
  Register scratch1 = r2;
  Register scratch2 = r3;
  Register result = r0;  // Result overwrites the key register.

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  // Emit the out-of-line slow cases of the char-at generator.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, false);
}
1099
1100
// Keyed load for receivers with an indexed interceptor: after verifying
// the receiver/key shape, delegates to the interceptor runtime entry.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Check that the key is an array index, that is Uint32.
  // (smi tag clear and sign bit clear in a single test).
  __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
  __ b(ne, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ Push(r1, r0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, false);
}
1139
1140
// Miss handler for keyed stores.  force_generic directs the runtime to
// install the generic stub directly rather than a specialized one.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1158
1159
// Slow path for keyed stores: completes the store in the runtime
// without recording a miss (so the IC is not rewritten).
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1177
1178
// Delegates a keyed store entirely to Runtime::kSetProperty, passing
// along NONE attributes and the caller's strict-mode flag.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(r1, r0);

  // Runtime::kSetProperty takes five arguments in total.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1197
1198
// Generic keyed store stub.  Handles smi-keyed stores into fast-mode
// elements of JS objects and JS arrays, including growing a JS array by
// one element at array[array.length]; everything else goes to the
// runtime via GenerateRuntimeSetProperty.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, fast, array, extra;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register elements = r3;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ tst(key, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(receiver, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the object.
  __ ldr(r4, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check that the object is in fast mode and writable.
  __ ldr(r4, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r4, ip);
  __ b(ne, &slow);
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(lo, &fast);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value.
  // r1: key.
  // r2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  // Calculate key + 1 as smi.
  ASSERT_EQ(0, kSmiTag);
  __ add(r4, key, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ b(&fast);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r4, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r4, ip);
  __ b(ne, &slow);

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);
  // Fall through to fast case.

  __ bind(&fast);
  // Fast case, store the value to the elements backing store.
  __ add(r5, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // Scale the smi key into a byte offset (smi tag absorbs one shift bit).
  __ add(r5, r5, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(r5));
  // Skip write barrier if the written value is a smi.
  __ tst(value, Operand(kSmiTagMask));
  __ Ret(eq);
  // Update write barrier for the elements array address.
  __ sub(r4, r5, Operand(elements));
  __ RecordWrite(elements, Operand(r4), r5, r6);

  __ Ret();
}
1304
1305
// Megamorphic named store: probe the stub cache for a monomorphic
// STORE_IC stub matching (receiver map, name, strict mode); fall
// through to the miss handler if the probe fails.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC,
                                         strict_mode);

  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5);  // r3-r5 are scratch.

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
1327
1328
// Miss handler for named stores: pushes (receiver, name, value) and
// tail-calls the StoreIC_Miss runtime entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1344
1345
// Specialized store stub for assignments to the "length" property of a
// JS array.  Performs inline type checks and then delegates the actual
// length update (which may truncate elements) to the runtime.
void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSObject::SetElementsLength accepts
  // (currently anything except for external and pixel arrays which means
  // anything with elements of FixedArray type.), but currently is restricted
  // to JSArray.
  // Value must be a number, but only smis are accepted as the most common case.

  Label miss;

  Register receiver = r1;
  Register value = r0;
  Register scratch = r3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}
1394
1395
// Store of a named property into a receiver with dictionary (slow-mode)
// properties: updates the dictionary entry inline, going to the miss
// handler when the receiver or property does not match expectations.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Verifies the receiver and loads its properties dictionary into r3.
  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}
1417
1418
// Store through a global proxy object: always delegates to
// Runtime::kSetProperty with NONE attributes and the strict-mode flag.
void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1437
1438
1439#undef __
1440
1441
1442Condition CompareIC::ComputeCondition(Token::Value op) {
1443  switch (op) {
1444    case Token::EQ_STRICT:
1445    case Token::EQ:
1446      return eq;
1447    case Token::LT:
1448      return lt;
1449    case Token::GT:
1450      // Reverse left and right operands to obtain ECMA-262 conversion order.
1451      return lt;
1452    case Token::LTE:
1453      // Reverse left and right operands to obtain ECMA-262 conversion order.
1454      return ge;
1455    case Token::GTE:
1456      return ge;
1457    default:
1458      UNREACHABLE();
1459      return kNoCondition;
1460  }
1461}
1462
1463
// Called on a CompareIC miss: computes the next IC state from the
// operands actually seen, installs the matching compare stub as the new
// call target, and on the first transition activates the inlined smi
// check at the call site.
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();
  // Compute the state transition based on the operand types just seen.
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    // Fully generic comparisons are handled by the ordinary compare stub
    // (left in r1, right in r0).
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}
1492
1493
// Activates the inlined smi check at the given IC call site.  The full
// code generator emits a dummy "cmp rx, rx" plus a conditional branch at
// the site; the cmp-immediate following the IC call encodes (register,
// delta) pointing back at that dummy compare.  This function rewrites
// the dummy compare into a real "tst rx, #kSmiTagMask" and flips the
// branch condition accordingly.
void PatchInlinedSmiCode(Address address) {
  Address cmp_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  // The delta is encoded as immediate + register-code * kOff12Mask.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta +=
      Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }
#endif

  // Locate the dummy compare and the branch that follows it.
  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
            Assembler::GetRm(instr_at_patch).code());
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::GetCondition(branch_instr) == eq) {
    // This is patching a "jump if not smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b eq, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b ne, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(ne);
  } else {
    ASSERT(Assembler::GetCondition(branch_instr) == ne);
    // This is patching a "jump if smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b ne, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b eq, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(eq);
  }
}
1559
1560
1561} }  // namespace v8::internal
1562
1563#endif  // V8_TARGET_ARCH_ARM
1564