1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#if V8_TARGET_ARCH_X64
31
32#include "codegen.h"
33#include "ic-inl.h"
34#include "runtime.h"
35#include "stub-cache.h"
36
37namespace v8 {
38namespace internal {
39
40// ----------------------------------------------------------------------------
41// Static IC stub generators.
42//
43
44#define __ ACCESS_MASM(masm)
45
46
// Jumps to |global_object| if |type| is one of the global-object instance
// types (JSGlobalObject, JSBuiltinsObject or JSGlobalProxy); falls through
// otherwise. Global objects need special treatment because their property
// loads must go through the runtime/stub cache rather than the fast paths.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}
59
60
61// Generated code falls through if the receiver is a regular non-global
62// JS object with slow properties and no interceptors.
// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
// Jumps to |miss| for smis, non-spec-objects, global objects, objects that
// need access checks or have named interceptors, and objects whose backing
// store is not a name dictionary (i.e. fast-properties objects).
static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm,
                                                Register receiver,
                                                Register r0,
                                                Register r1,
                                                Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   r0: used to hold receiver instance type.
  //       Holds the property dictionary on fall through.
  //   r1: used to hold receivers map.

  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
  __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
  __ j(below, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  // Global objects must not take this fast path; they need the stub cache.
  GenerateGlobalInstanceTypeCheck(masm, r0, miss);

  // Check for non-global object that requires access check.
  __ testb(FieldOperand(r1, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ j(not_zero, miss);

  // Verify the properties backing store is a dictionary (hash table), not a
  // fast-mode FixedArray; leaves the dictionary in r0 on fall-through.
  __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss);
}
98
99
100
101// Helper function used to load a property from a dictionary backing storage.
102// This function may return false negatives, so miss_label
103// must always call a backup property load that is complete.
104// This function is safe to call if name is not an internalized string,
105// and will jump to the miss_label in that case.
106// The generated code assumes that the receiver has slow properties,
107// is not a global object and does not have interceptors.
// Helper function used to load a property from a dictionary backing storage.
// This function may return false negatives, so miss_label
// must always call a backup property load that is complete.
// This function is safe to call if name is not an internalized string,
// and will jump to the miss_label in that case.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss_label,
                                   Register elements,
                                   Register name,
                                   Register r0,
                                   Register r1,
                                   Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0   - used to hold the capacity of the property dictionary.
  //
  // r1   - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary. Jumps to miss_label on a definite miss; jumps to
  // &done with the entry index in r1 on a hit.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   r0,
                                                   r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Each dictionary entry is (key, value, details); details is the third slot.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // A non-zero type field means the entry is not a NORMAL property
  // (e.g. a callback), which this fast path cannot handle.
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movq(result,
          Operand(elements, r1, times_pointer_size,
                  kValueOffset - kHeapObjectTag));
}
157
158
159// Helper function used to store a property to a dictionary backing
160// storage. This function may fail to store a property even though it
161// is in the dictionary, so code at miss_label must always call a
162// backup property store that is complete. This function is safe to
163// call if name is not an internalized string, and will jump to the miss_label
164// in that case. The generated code assumes that the receiver has slow
165// properties, is not a global object and does not have interceptors.
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not an internalized string, and will jump to the miss_label
// in that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss_label,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used during the positive dictionary lookup and is clobbered.
  //
  // scratch1 - used for index into the property dictionary and is clobbered.
  Label done;

  // Probe the dictionary. Jumps to miss_label on a definite miss; jumps to
  // &done with the entry index in scratch1 on a hit.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch0,
                                                   scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Each dictionary entry is (key, value, details); details is the third slot.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject entries that are not NORMAL properties or that are read only.
  // The mask is shifted by kSmiTagSize because the details word is a smi.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ Test(Operand(elements,
                  scratch1,
                  times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(scratch1, Operand(elements,
                           scratch1,
                           times_pointer_size,
                           kValueOffset - kHeapObjectTag));
  __ movq(Operand(scratch1, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ movq(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}
225
226
227// Checks the receiver for special cases (value type, slow case bits).
228// Falls through for regular JS object.
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
// Jumps to |slow| for smis, JSValue wrappers and below-JS_OBJECT types,
// and for objects that require access checks or have the interceptor bit
// (|interceptor_bit|) set in their map's bit field.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           int interceptor_bit,
                                           Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field.
  __ testb(FieldOperand(map, Map::kBitFieldOffset),
           Immediate((1 << Map::kIsAccessCheckNeeded) |
                     (1 << interceptor_bit)));
  __ j(not_zero, slow);
}
256
257
258// Loads an indexed element from a fast case array.
259// If not_fast_array is NULL, doesn't perform the elements map check.
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  //   scratch - used to hold elements of the receiver and the loaded value.

  __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(above_equal, out_of_range);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movq(scratch, FieldOperand(elements,
                                index.reg,
                                index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  // Move into the result register only on success so that 'receiver'/'key'
  // aliasing of 'result' stays intact on the bailout paths above.
  if (!result.is(scratch)) {
    __ movq(result, scratch);
  }
}
314
315
316// Checks whether a key is an array index string or a unique name.
317// Falls through if the key is a unique name.
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
// Jumps to |index_string| (with the raw hash field in |hash|) if the key is
// a string with a cached array index; jumps to |not_unique| for any other
// non-unique key.
static void GenerateKeyNameCheck(MacroAssembler* masm,
                                 Register key,
                                 Register map,
                                 Register hash,
                                 Label* index_string,
                                 Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  // Equal to LAST_UNIQUE_NAME_TYPE means the key is a non-string unique
  // name (a symbol), which is unique by construction.
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}
349
350
351
// Generic keyed load: handles smi keys against fast elements and number
// dictionaries, and name keys via the keyed lookup cache or the receiver's
// property dictionary. Anything else falls through to the runtime.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rcx, &check_number_dictionary);

  // Fast-elements path: load directly from the elements FixedArray.
  // Result ends up in rax (same register as the key; only overwritten on
  // success).
  GenerateFastArrayLoad(masm,
                        rdx,
                        rax,
                        rcx,
                        rbx,
                        rax,
                        NULL,
                        &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, rax);
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  // rdx: receiver
  // rax: key
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, rax, rcx, rbx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary leaving result in rcx.
  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ movl(rcx, rbx);
  __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
  __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
  __ shr(rdi, Immediate(String::kHashShift));
  __ xor_(rcx, rdi);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  __ and_(rcx, Immediate(mask));

  // Load the key (consisting of map and internalized string) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys
      = ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  // Probe every entry of the bucket but the last; each entry is a
  // (map, name) pair, hence the scale of 2 * kPointerSize per entry.
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ movq(rdi, rcx);
    __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
    __ LoadAddress(kScratchRegister, cache_keys);
    int off = kPointerSize * i * 2;
    __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
    __ j(not_equal, &try_next_entry);
    __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry of the bucket: a mismatch here means a cache miss.
  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
  __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
  __ j(not_equal, &slow);
  __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
  __ j(not_equal, &slow);

  // Get field offset, which is a 32-bit integer.
  ExternalReference cache_field_offsets
      = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      // Adjust the bucket-relative index to address entry i's field offset.
      __ addl(rcx, Immediate(i));
    }
    __ LoadAddress(kScratchRegister, cache_field_offsets);
    __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
    __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
    // Offsets >= number of in-object properties live in the property array.
    __ subq(rdi, rcx);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ addq(rcx, rdi);
  __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
                            FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rdx: receiver
  // rax: key
  // rbx: elements

  // Global objects cannot take the dictionary fast path.
  __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
  __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  // Key was a string with a cached array index: extract the index and
  // re-enter the smi-key path above.
  __ bind(&index_name);
  __ IndexFromHash(rbx, rax);
  __ jmp(&index_smi);
}
513
514
// Keyed load specialized for string receivers: returns the one-character
// string at the given index via StringCharAtGenerator, and falls back to
// the generic miss handler for non-strings, non-number keys, or indices
// out of range.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Register receiver = rdx;
  Register index = rax;
  Register scratch = rcx;
  Register result = rax;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  // Slow-path code for the char-at generator (e.g. non-flat strings);
  // emitted after the fast-path return.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, MISS);
}
545
546
// Keyed load for receivers with an indexed interceptor: verifies the
// receiver/key shape and tail-calls the interceptor runtime entry;
// otherwise misses.
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(rax, &slow);

  // Get the map of the receiver.
  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
  __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
  __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  // Rearrange the stack so the arguments sit below the return address.
  __ PopReturnAddressTo(rcx);
  __ push(rdx);  // receiver
  __ push(rax);  // key
  __ PushReturnAddressFrom(rcx);

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
588
589
// Emits the fast-path stores shared by KeyedStoreIC::GenerateGeneric:
// stores into FAST_SMI/FAST/FAST_DOUBLE elements, performing elements-kind
// transitions as needed. |check_map| controls whether the elements map is
// re-checked (skipped on the grow path, which checked it already), and
// |increment_length| controls whether receiver->length is bumped (the
// array[array.length] = x case).
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // rax: value
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // rdx: receiver (a JSArray)
  // r9: map of receiver
  if (check_map == kCheckMap) {
    __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, fast_double);
  }
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(rax, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  // No write barrier needed: smis are not heap pointers.
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
          rax);
  __ movq(rdx, rax);  // Preserve the value which is returned.
  __ RecordWriteArray(
      rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    // rdi: elements array's map
    __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    __ j(not_equal, slow);
  }
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(rcx, 1));
    __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
                                                    FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
  // Reload the elements array: the transition may have replaced it.
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
                                                                   slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         rbx,
                                         rdi,
                                         slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
710
711
// Generic keyed store: dispatches smi-keyed stores into JSObjects and
// JSArrays to the fast-path helper (with and without array growth),
// falling back to the runtime for everything else.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
  // From here on rcx holds the key as an untagged int32; slow paths that
  // reach the runtime must re-tag it first (see &slow below).
  __ SmiToInteger32(rcx, rcx);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // rax: value
  // rdx: JSObject
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  // rax: value
  // rbx: FixedArray
  // rcx: index
  __ j(above, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  // Re-tag the key before handing it to the runtime.
  __ Integer32ToSmi(rcx, rcx);
  __ bind(&slow_with_tagged_index);
  GenerateRuntimeSetProperty(masm, strict_mode);
  // Never returns to here.

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // rax: value
  // rdx: receiver (a JSArray)
  // rbx: receiver's elements array (a FixedArray)
  // rcx: index
  // flags: smicompare (rdx.length(), rbx)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
  __ j(below_equal, &slow);
  // Increment index to get new length.
  __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // rax: value
  // rdx: receiver (a JSArray)
  // rcx: index
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
  __ j(below_equal, &extra);

  // Emit the shared fast-path bodies: one for in-bounds stores, one for
  // stores that grow the array by a single element.
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}
805
806
807// The generated code does not accept smi keys.
808// The generated code falls through if both probes miss.
809void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
810                                               int argc,
811                                               Code::Kind kind,
812                                               Code::ExtraICState extra_state) {
813  // ----------- S t a t e -------------
814  // rcx                      : function name
815  // rdx                      : receiver
816  // -----------------------------------
817  Label number, non_number, non_string, boolean, probe, miss;
818
819  // Probe the stub cache.
820  Code::Flags flags = Code::ComputeFlags(kind,
821                                         MONOMORPHIC,
822                                         extra_state,
823                                         Code::NORMAL,
824                                         argc);
825  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
826                                                  rax);
827
828  // If the stub cache probing failed, the receiver might be a value.
829  // For value objects, we use the map of the prototype objects for
830  // the corresponding JSValue for the cache and that is what we need
831  // to probe.
832  //
833  // Check for number.
834  __ JumpIfSmi(rdx, &number);
835  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
836  __ j(not_equal, &non_number);
837  __ bind(&number);
838  StubCompiler::GenerateLoadGlobalFunctionPrototype(
839      masm, Context::NUMBER_FUNCTION_INDEX, rdx);
840  __ jmp(&probe);
841
842  // Check for string.
843  __ bind(&non_number);
844  __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
845  __ j(above_equal, &non_string);
846  StubCompiler::GenerateLoadGlobalFunctionPrototype(
847      masm, Context::STRING_FUNCTION_INDEX, rdx);
848  __ jmp(&probe);
849
850  // Check for boolean.
851  __ bind(&non_string);
852  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
853  __ j(equal, &boolean);
854  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
855  __ j(not_equal, &miss);
856  __ bind(&boolean);
857  StubCompiler::GenerateLoadGlobalFunctionPrototype(
858      masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
859
860  // Probe the stub cache for the value object.
861  __ bind(&probe);
862  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
863                                                  no_reg);
864
865  __ bind(&miss);
866}
867
868
// Tail-calls the function in rdi with the argc arguments already on the
// stack.  Jumps to |miss| if rdi does not hold a JSFunction.
static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rdi                 : function
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  // Smis are never functions.
  __ JumpIfSmi(rdi, miss);
  // Check that the value is a JavaScript function.
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
  __ j(not_equal, miss);

  // Invoke the function (a jump, not a call: the IC frame is reused).
  ParameterCount actual(argc);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
892
893
// Looks up the named function in the receiver's property dictionary and
// tail-calls it.
// The generated code falls through if the call should be handled by runtime.
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Bail out (to the fall-through) unless the receiver is a regular JS
  // object with slow (dictionary) properties; loads the dictionary into rax.
  GenerateNameDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);

  // rax: elements
  // Search the dictionary placing the result in rdi.
  GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);

  GenerateFunctionTailCall(masm, argc, &miss);

  __ bind(&miss);
}
920
921
// Handles a call IC miss: calls the IC miss utility (identified by |id|)
// to look up the function, patches global receivers, and invokes the
// function returned by the runtime.
void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(counters->call_miss(), 1);
  } else {
    __ IncrementCounter(counters->keyed_call_miss(), 1);
  }

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ push(rdx);
    __ push(rcx);

    // Call the entry.  rax holds the number of arguments (2) passed to
    // the IC utility function.
    CEntryStub stub(1);
    __ Set(rax, 2);
    __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
    __ CallStub(&stub);

    // Move result to rdi and exit the internal frame.
    __ movq(rdi, rax);
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
    __ JumpIfSmi(rdx, &invoke);
    __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
    __ j(equal, &global);
    __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
    __ j(not_equal, &invoke);

    // Patch the receiver on the stack: global objects are replaced by
    // their global receiver proxy before the call.
    __ bind(&global);
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
    __ bind(&invoke);
  }

  // Invoke the function.  The call kind (function vs. method) is encoded
  // in the extra IC state.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(rdi,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}
993
994
// Megamorphic CallIC: probes the monomorphic stub cache and falls back to
// the generic miss handler if the probe does not hit.
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  // Falls through here only when the probe misses.
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}
1013
1014
// Megamorphic KeyedCallIC: handles smi keys via fast/dictionary element
// loads, unique-name keys via dictionary or monomorphic-cache lookup, and
// falls back to runtime for everything else.
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  Label do_call, slow_call, slow_load;
  Label check_number_dictionary, check_name, lookup_monomorphic_cache;
  Label index_smi, index_name;

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &check_name);

  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);

  // Try a fast-elements load; falls through to check_number_dictionary if
  // the elements are a dictionary, or slow_load on a miss.
  GenerateFastArrayLoad(
      masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);

  __ bind(&do_call);
  // receiver in rdx is not used after this point.
  // rcx: key
  // rdi: function
  GenerateFunctionTailCall(masm, argc, &slow_call);

  __ bind(&check_number_dictionary);
  // rax: elements
  // rcx: smi key
  // Check whether the elements is a number dictionary.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow_load);
  __ SmiToInteger32(rbx, rcx);
  // rbx: untagged index
  __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rcx);  // save the key
    __ push(rdx);  // pass the receiver
    __ push(rcx);  // pass the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(rcx);  // restore the key
  }
  // The loaded property (hopefully a function) is in rax.
  __ movq(rdi, rax);
  __ jmp(&do_call);

  __ bind(&check_name);
  // Jumps to index_name if the key is a string holding an array index,
  // and to slow_call if it is not a unique name at all.
  GenerateKeyNameCheck(masm, rcx, rax, rbx, &index_name, &slow_call);

  // The key is known to be a unique name.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor a unique name,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
  GenerateMiss(masm, argc);

  __ bind(&index_name);
  // Extract the cached array index from the name's hash field into rcx.
  __ IndexFromHash(rbx, rcx);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
1124
1125
// Normal KeyedCallIC: only proceeds with the CallICBase dictionary lookup
// when the key is actually a name; otherwise goes straight to the miss
// handler.
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Check if the name is really a name.
  Label miss;
  __ JumpIfSmi(rcx, &miss);
  Condition cond = masm->IsObjectNameType(rcx, rax, rax);
  __ j(NegateCondition(cond), &miss);
  // CallICBase::GenerateNormal falls through on its own miss, so the
  // miss label below also covers that path.
  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
1146
1147
// Computes the location of a mapped (aliased) argument in a non-strict
// arguments object.  Returns an Operand addressing the context slot that
// holds the argument's value.  Jumps to |unmapped_case| (with the
// parameter map left in scratch1) when the key is not mapped, and to
// |slow_case| when the receiver or key is not of the expected shape.
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                             Register object,
                                             Register key,
                                             Register scratch1,
                                             Register scratch2,
                                             Register scratch3,
                                             Label* unmapped_case,
                                             Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is at least some kind of JS receiver.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments.  The parameter
  // map's first two slots are the context and backing store, hence the -2.
  __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
  __ cmpq(key, scratch2);
  __ j(greater_equal, unmapped_case);

  // Load element index and check whether it is the hole.  The +2 pointer
  // sizes skip the context and backing store slots.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ SmiToInteger64(scratch3, key);
  __ movq(scratch2, FieldOperand(scratch1,
                                 scratch3,
                                 times_pointer_size,
                                 kHeaderSize));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
  __ SmiToInteger64(scratch3, scratch2);
  return FieldOperand(scratch1,
                      scratch3,
                      times_pointer_size,
                      Context::kHeaderSize);
}
1202
1203
// Computes the location of an unmapped argument in the backing store of a
// non-strict arguments object.  Returns an Operand addressing the element.
// Jumps to |slow_case| if the backing store is not a fixed array or the
// key is out of bounds.
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  // Bounds check: both key and length are smis, so cmpq compares smi bits.
  __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmpq(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store,
                      scratch,
                      times_pointer_size,
                      FixedArray::kHeaderSize);
}
1227
1228
// Keyed load on a non-strict arguments object: tries the mapped
// (aliased) lookup first, then the backing store, then the runtime.
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location =
      GenerateMappedArgumentsLookup(
          masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
  __ movq(rax, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
  // A hole in the backing store means the argument was deleted.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rax, unmapped_location);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
1252
1253
// Keyed store on a non-strict arguments object: writes into the mapped
// context slot or the backing store, emitting a write barrier for the
// stored value in either case.
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, notin;
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
  __ movq(mapped_location, rax);
  // Record-write needs the slot address (r9) and the value (r8); rax is
  // preserved as the IC's result.
  __ lea(r9, mapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
  __ movq(unmapped_location, rax);
  __ lea(r9, unmapped_location);
  __ movq(r8, rax);
  __ RecordWrite(rbx,
                 r9,
                 r8,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, MISS);
}
1291
1292
// Keyed call where the receiver is a non-strict arguments object: loads
// the callee from the mapped slot or the backing store and tail-calls it.
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label slow, notin;
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
  __ movq(rdi, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
  // A hole means the argument was deleted; fall back to the miss handler.
  __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow);
  __ movq(rdi, unmapped_location);
  GenerateFunctionTailCall(masm, argc, &slow);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}
1321
1322
1323void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
1324  // ----------- S t a t e -------------
1325  //  -- rax    : receiver
1326  //  -- rcx    : name
1327  //  -- rsp[0] : return address
1328  // -----------------------------------
1329
1330  // Probe the stub cache.
1331  Code::Flags flags = Code::ComputeFlags(
1332      Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
1333      Code::NORMAL, Code::LOAD_IC);
1334  Isolate::Current()->stub_cache()->GenerateProbe(
1335      masm, flags, rax, rcx, rbx, rdx);
1336
1337  GenerateMiss(masm);
1338}
1339
1340
// Normal LoadIC: loads a property from the receiver's slow (dictionary)
// properties without going through the stub cache.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Miss unless the receiver is a regular JS object with dictionary
  // properties; loads the property dictionary into rdx.
  GenerateNameDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);

  //  rdx: elements
  // Search the dictionary placing the result in rax.
  GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
  __ ret(0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}
1360
1361
// LoadIC miss handler: pushes (receiver, name) and tail-calls the
// kLoadIC_Miss IC utility in the runtime.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1382
1383
// Tail-calls Runtime::kGetProperty with (receiver, name) as arguments.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rax);  // receiver
  __ push(rcx);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
1399
1400
// KeyedLoadIC miss handler: pushes (receiver, key) and tail-calls the
// appropriate IC miss utility.  MISS_FORCE_GENERIC requests that the IC
// be rewritten to the generic stub rather than a specialized one.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
1423
1424
// Tail-calls Runtime::kKeyedGetProperty with (receiver, key) as arguments.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
1440
1441
1442void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1443                                  StrictModeFlag strict_mode) {
1444  // ----------- S t a t e -------------
1445  //  -- rax    : value
1446  //  -- rcx    : name
1447  //  -- rdx    : receiver
1448  //  -- rsp[0] : return address
1449  // -----------------------------------
1450
1451  // Get the receiver from the stack and probe the stub cache.
1452  Code::Flags flags = Code::ComputeFlags(
1453      Code::STUB, MONOMORPHIC, strict_mode,
1454      Code::NORMAL, Code::STORE_IC);
1455  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
1456                                                  no_reg);
1457
1458  // Cache miss: Jump to runtime.
1459  GenerateMiss(masm);
1460}
1461
1462
// StoreIC miss handler: pushes (receiver, name, value) and tail-calls the
// kStoreIC_Miss IC utility in the runtime.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1482
1483
// Normal StoreIC: stores to an existing property in the receiver's slow
// (dictionary) properties without going through the stub cache.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Label miss;

  // Miss unless the receiver is a regular JS object with dictionary
  // properties; loads the property dictionary into rbx.
  GenerateNameDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);

  GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
1505
1506
// Tail-calls Runtime::kSetProperty with (receiver, name, value,
// attributes, strict mode) as arguments.
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(rbx);
  __ push(rdx);
  __ push(rcx);
  __ push(rax);
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1526
1527
// Tail-calls Runtime::kSetProperty with (receiver, key, value,
// attributes, strict mode) as arguments.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));          // PropertyAttributes
  __ Push(Smi::FromInt(strict_mode));   // Strict mode.
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
1548
1549
// StoreIC slow handler: pushes (receiver, key, value) and tail-calls the
// kStoreIC_Slow IC utility in the runtime.
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1568
1569
// KeyedStoreIC slow handler: pushes (receiver, key, value) and tail-calls
// the kKeyedStoreIC_Slow IC utility in the runtime.
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1588
1589
// KeyedStoreIC miss handler: pushes (receiver, key, value) and tail-calls
// the appropriate IC miss utility.  MISS_FORCE_GENERIC requests that the
// IC be rewritten to the generic stub rather than a specialized one.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Re-push the arguments under the return address.
  __ PopReturnAddressTo(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  ExternalReference ref = miss_mode == MISS_FORCE_GENERIC
    ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                        masm->isolate())
    : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1611
1612
1613#undef __
1614
1615
1616Condition CompareIC::ComputeCondition(Token::Value op) {
1617  switch (op) {
1618    case Token::EQ_STRICT:
1619    case Token::EQ:
1620      return equal;
1621    case Token::LT:
1622      return less;
1623    case Token::GT:
1624      return greater;
1625    case Token::LTE:
1626      return less_equal;
1627    case Token::GTE:
1628      return greater_equal;
1629    default:
1630      UNREACHABLE();
1631      return no_condition;
1632  }
1633}
1634
1635
1636bool CompareIC::HasInlinedSmiCode(Address address) {
1637  // The address of the instruction following the call.
1638  Address test_instruction_address =
1639      address + Assembler::kCallTargetAddressOffset;
1640
1641  // If the instruction following the call is not a test al, nothing
1642  // was inlined.
1643  return *test_instruction_address == Assembler::kTestAlByte;
1644}
1645
1646
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // Toggles the inlined smi check at an IC call site between enabled and
  // disabled by rewriting the short conditional jump that guards it.
  // |address| is the IC call site; |check| selects the direction of the
  // toggle (ENABLE_INLINED_SMI_CHECK or the reverse).
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kNopByte) {
    ASSERT(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  // The byte after the test-al opcode encodes the (signed 8-bit) distance
  // back to the jump that must be repatched.
  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n",
           address, test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check that the byte we are about to overwrite is the short
  // jump opcode expected for the current (pre-toggle) state.
  ASSERT((check == ENABLE_INLINED_SMI_CHECK)
         ? (*jmp_address == Assembler::kJncShortOpcode ||
            *jmp_address == Assembler::kJcShortOpcode)
         : (*jmp_address == Assembler::kJnzShortOpcode ||
            *jmp_address == Assembler::kJzShortOpcode));
  // Choose the new condition, preserving the jump's polarity (taken vs.
  // not-taken) while swapping carry-based for zero-based conditions.
  Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
      : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  // Rewrite the opcode in place; short Jcc encodings are the 0x70 prefix
  // OR'ed with the condition code.
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
1682
1683
1684} }  // namespace v8::internal
1685
1686#endif  // V8_TARGET_ARCH_X64
1687