// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/ic-compiler.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)


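// Note on global receivers: global objects hold their properties in
// PropertyCells, so the generic dictionary load/store helpers below would
// yield the cell rather than its value. Callers use this check to divert
// global receivers to the slow path instead.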
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
  __ j(equal, global_object);
  __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, global_object);
}

// Helper function used to load a property from a dictionary backing storage.
// This function may return false negatives, so code at miss_label must
// always call a backup property load that is complete. This function is
// safe to call if name is not an internalized string, and will jump to the
// miss_label in that case. The generated code assumes that the receiver has
// slow properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // r0   - used to hold the capacity of the property dictionary.
  //
  // r1   - used to hold the index into the property dictionary.
  //
  // result - holds the result on exit if the load succeeded.

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r1 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
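  // A NameDictionary entry occupies three words: [ key | value | details ].
  // At this point r1 is expected to hold the entry's key position as a word
  // index into the elements area (already scaled by the entry size), so the
  // value and details words live one and two pointers past it; that is what
  // kValueOffset and kDetailsOffset below encode.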
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ Test(Operand(elements, r1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(PropertyDetails::TypeField::kMask));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ movp(result, Operand(elements, r1, times_pointer_size,
                          kValueOffset - kHeapObjectTag));
}


// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not an internalized string, and will jump to the miss_label
// in that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register scratch0,
                                    Register scratch1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // scratch0 - used during the positive dictionary lookup and is clobbered.
  //
  // scratch1 - used for index into the property dictionary and is clobbered.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(
      masm, miss_label, &done, elements, name, scratch0, scratch1);

  // If probing finds an entry in the dictionary, scratch1 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
      << kSmiTagSize;
  __ Test(Operand(elements, scratch1, times_pointer_size,
                  kDetailsOffset - kHeapObjectTag),
          Smi::FromInt(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ leap(scratch1, Operand(elements, scratch1, times_pointer_size,
                            kValueOffset - kHeapObjectTag));
  __ movp(Operand(scratch1, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ movp(scratch0, value);
  __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
  __ j(below, slow);

  // Check bit field.
  __ testb(
      FieldOperand(map, Map::kBitFieldOffset),
      Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ j(not_zero, slow);
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register elements,
                                  Register scratch, Register result,
                                  Label* not_fast_array, Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  //   scratch - used to hold elements of the receiver and the loaded value.

  __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                   Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
  // Unsigned comparison rejects negative indices.
  __ j(above_equal, out_of_range);
  // Fast case: Do the load.
  SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
  __ movp(scratch, FieldOperand(elements, index.reg, index.scale,
                                FixedArray::kHeaderSize));
  __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ movp(result, scratch);
  }
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);  // The value in hash is used at jump target.

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
           Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}


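// Generic keyed load. The dispatch below is, roughly:
//   - smi key: try fast elements, then a number (element) dictionary;
//   - name key: try the keyed lookup cache (map/name -> field offset),
//     then the receiver's property dictionary;
//   - string key with a cached array index: converted back to a smi key
//     (see index_name below).
// Everything else falls through to the runtime via the slow label.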
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(rax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, rax, rbx, rax, NULL, &slow);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ SmiToInteger32(rbx, key);
  __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check whether the elements object is a number dictionary.
  // rbx: key as untagged int32
  // rax: elements
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, &slow);
  __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // Slow case: Jump to runtime.
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
                                 &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary leaving result in key.
  __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movl(rax, rbx);
  __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
  __ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
  __ shrl(rdi, Immediate(String::kHashShift));
  __ xorp(rax, rdi);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  __ andp(rax, Immediate(mask));
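  // In effect (mirroring the instructions above):
  //   hash   = (map_word >> kMapHashShift) ^ (hash_field >> kHashShift);
  //   bucket = hash & (kCapacityMask & kHashMask);
  // rax now holds the bucket index into the keyed lookup cache.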

  // Load the key (consisting of map and internalized string) from the cache
  // and check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(masm->isolate());

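  // Each cache entry is a (map, name) pair occupying two consecutive words,
  // with kEntriesPerBucket entries per bucket; the shlp by
  // kPointerSizeLog2 + 1 below scales the bucket index into a byte offset
  // accordingly.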
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ movp(rdi, rax);
    __ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
    __ LoadAddress(kScratchRegister, cache_keys);
    int off = kPointerSize * i * 2;
    __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
    __ j(not_equal, &try_next_entry);
    __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
  __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
  __ j(not_equal, &slow);
  __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
  __ j(not_equal, &slow);

  // Get field offset, which is a 32-bit integer.
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      __ addl(rax, Immediate(i));
    }
    __ LoadAddress(kScratchRegister, cache_field_offsets);
    __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
    __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
    __ subp(rdi, rax);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ addp(rax, rdi);
  __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ movp(rax,
          FieldOperand(rax, rdi, times_pointer_size, FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // rbx: property dictionary (a NameDictionary, loaded from the receiver's
  // properties above)

  __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, rax, &slow);

  GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  __ IndexFromHash(rbx, key);
  __ jmp(&index_smi);
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rbx;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}


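// Shared body for the two instantiations at the bottom of
// KeyedStoreIC::GenerateGeneric: fast_object/fast_double are the fast-path
// entry labels, check_map selects whether the elements map must still be
// verified (the grow paths have already checked it), and increment_length
// selects whether receiver->length is bumped (the array[array.length] case).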
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));
  DCHECK(value.is(rax));
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  // rbx: receiver's elements array (a FixedArray)
  // receiver is a JSArray.
  // r9: map of receiver
  if (check_map == kCheckMap) {
    __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V".
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ movp(kScratchRegister,
          FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize));
  __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ ret(0);

  __ bind(&non_smi_value);
  // Writing a non-smi, check whether array allows non-smi elements.
  // r9: receiver's map
  __ CheckFastObjectElements(r9, &transition_smi_elements);

  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
          value);
  __ movp(rdx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(rbx, rdx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    // rdi: elements array's map
    __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    __ j(not_equal, slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime.
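  // The hole in a FixedDoubleArray is a distinguished NaN bit pattern
  // (kHoleNanLower32/kHoleNanUpper32). Stored doubles are canonicalized so
  // that pattern cannot occur otherwise, which is why comparing only the
  // upper 32 bits (at offset sizeof(kHoleNanLower32)) suffices here.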
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmpl(FieldOperand(rbx, key, times_8, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, rbx, key, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ leal(rdi, Operand(key, 1));
    __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ movp(r9, FieldOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, rbx, rdi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, rbx,
                                         rdi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         rbx, rdi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, rbx, mode, slow);
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  // Return address is on the stack.
  Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(key.is(rcx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow_with_tagged_index);
  // Get the map from the receiver.
  __ movp(r9, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not
  // observed. The generic stub does not perform map checks or handle
  // observed objects.
  __ testb(FieldOperand(r9, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
  __ j(not_zero, &slow_with_tagged_index);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow_with_tagged_index);
  __ SmiToInteger32(key, key);

  __ CmpInstanceType(r9, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds.
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  // rbx: FixedArray
  __ j(above, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  __ Integer32ToSmi(key, key);
  __ bind(&slow_with_tagged_index);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, strict_mode);
  // Never returns to here.

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // rbx: receiver's elements array (a FixedArray)
  // flags: smicompare (receiver.length(), key)
  __ j(not_equal, &slow);  // do not leave holes in the array
  __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
  __ j(below_equal, &slow);
  // There is capacity. Pick the grow path matching the elements kind; the
  // length update itself happens in the helper (kIncrementLength).
  __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  // rdi: elements array's map
  __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is, the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ SmiCompareInteger32(FieldOperand(receiver, JSArray::kLengthOffset), key);
  __ j(below_equal, &extra);

  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, &slow,
                                  kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}


static Operand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Register scratch3, Label* unmapped_case,
    Label* slow_case) {
  Heap* heap = masm->isolate()->heap();
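
  // Sloppy arguments use a "parameter map" as their elements array. The
  // constants below assume this layout (a sketch):
  //   elements[0]     : the context
  //   elements[1]     : the arguments backing store (a FixedArray)
  //   elements[2 + i] : context slot index for mapped parameter i, or the
  //                     hole if argument i is unmapped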

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi.
  Condition check = masm->CheckNonNegativeSmi(key);
  __ j(NegateCondition(check), slow_case);

  // Load the elements into scratch1 and check its map. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments.
  __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
  __ cmpp(key, scratch2);
  __ j(greater_equal, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ SmiToInteger64(scratch3, key);
  __ movp(scratch2,
          FieldOperand(scratch1, scratch3, times_pointer_size, kHeaderSize));
  __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
  __ SmiToInteger64(scratch3, scratch2);
  return FieldOperand(scratch1, scratch3, times_pointer_size,
                      Context::kHeaderSize);
}


static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmpp(key, scratch);
  __ j(greater_equal, slow_case);
  __ SmiToInteger64(scratch, key);
  return FieldOperand(backing_store, scratch, times_pointer_size,
                      FixedArray::kHeaderSize);
}


void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, notin;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(rdx));
  DCHECK(name.is(rcx));
  DCHECK(value.is(rax));

  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, name, rbx, rdi, r8, &notin, &slow);
  __ movp(mapped_location, value);
  __ leap(r9, mapped_location);
  __ movp(r8, value);
  __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in rbx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow);
  __ movp(unmapped_location, value);
  __ leap(r9, unmapped_location);
  __ movp(r8, value);
  __ RecordWrite(rbx, r9, r8, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                 INLINE_SMI_CHECK);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = rax;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ movp(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                   JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), rbx, rdi, rax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}


// A register that isn't one of the parameters to the load IC.
static const Register LoadIC_TempRegister() { return rbx; }


static const Register KeyedLoadIC_TempRegister() { return rbx; }


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->load_miss(), 1);

  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(LoadDescriptor::ReceiverRegister());  // receiver
  __ Push(LoadDescriptor::NameRegister());      // name
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.

  __ PopReturnAddressTo(LoadIC_TempRegister());
  __ Push(LoadDescriptor::ReceiverRegister());  // receiver
  __ Push(LoadDescriptor::NameRegister());      // name
  __ PushReturnAddressFrom(LoadIC_TempRegister());

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // The return address is on the stack.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_miss(), 1);

  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(LoadDescriptor::ReceiverRegister());  // receiver
  __ Push(LoadDescriptor::NameRegister());      // name
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // The return address is on the stack.

  __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
  __ Push(LoadDescriptor::ReceiverRegister());  // receiver
  __ Push(LoadDescriptor::NameRegister());      // name
  __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.

  // Probe the stub cache for the receiver/name pair.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, false, StoreDescriptor::ReceiverRegister(),
      StoreDescriptor::NameRegister(), rbx, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();

  DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));

  __ PopReturnAddressTo(rbx);
  __ Push(receiver);
  __ Push(name);
  __ Push(value);
  __ PushReturnAddressFrom(rbx);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register dictionary = rbx;

  Label miss;

  __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      return greater;
    case Token::LTE:
      return less_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}


void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

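  // Patch-site layout (a sketch; the short jump sits `delta` bytes before
  // the test instruction, which itself immediately follows the IC call):
  //
  //   j{z,nz,c,nc} short <target>   <-- jmp_address = test_addr - delta
  //   ... inlined smi/map check code ...
  //   call <IC stub>
  //   test al, <delta>              <-- test_instruction_address
  //
  // The test instruction's immediate byte records the distance back to the
  // jump; it is read into `delta` below.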
  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n", address,
           test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
}
}  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64