// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/ic-inl.h"
#include "src/codegen.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Calculate the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);

  // Check that the key in the entry matches the name.
  __ lw(at, MemOperand(base_addr, 0));
  __ Branch(&miss, ne, name, Operand(at));

  // Check the map matches.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);

  // Miss: fall through.
  __ bind(&miss);
}


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);
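  // (Each entry has three pointer-sized fields: key, value, and map;
  // 3 * kPointerSize == 12 on 32-bit MIPS.)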

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check register validity.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ srl(scratch, scratch, kHeapObjectTagSize);
  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  __ And(scratch, scratch, Operand(mask));
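  // scratch now holds the primary table index. As a sketch of the computation
  // (assuming it mirrors the C++ hash used by the StubCache lookup):
  //   ((name_hash_field + receiver_map) >> kHeapObjectTagSize
  //       ^ (flags >> kHeapObjectTagSize)) & (kPrimaryTableSize - 1)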

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ srl(at, name, kHeapObjectTagSize);
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ And(scratch, scratch, Operand(mask2));
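  // scratch now holds the secondary table index. As a sketch (assuming it
  // mirrors the C++ secondary hash used by the StubCache lookup):
  //   (primary_index - (name >> kHeapObjectTagSize)
  //       + ((flags >> kHeapObjectTagSize) & mask2)) & mask2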

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);
  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ lw(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ lw(scratch, MemOperand(cp, offset));
  __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
  __ li(at, function);
  __ Branch(miss, ne, at, Operand(scratch));

  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ lw(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the a0 register.
// After executing the generated code, the receiver_reg and name_reg
// may be clobbered.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // a0 : value.
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ li(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    Handle<Map> current;
    if (!it.Done()) {
      __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
      Label do_store;
      while (true) {
        // Do the CompareMap() directly within the Branch() functions.
        current = it.Current();
        it.Advance();
        if (it.Done()) {
          __ Branch(miss_label, ne, scratch1, Operand(current));
          break;
        }
        __ Branch(&do_store, eq, scratch1, Operand(current));
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
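    // The value is a smi: untag it and convert the 32-bit integer to a
    // double in f4 via the FPU.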
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  // Update the map of the object.
  __ li(scratch1, Operand(transition));
  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


// Generate StoreField code; the value is passed in the a0 register.
// When leaving the generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // a0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  FieldIndex index = lookup->GetFieldIndex();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = lookup->GetFieldType();
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
      Label do_store;
      Handle<Map> current;
      while (true) {
        // Do the CompareMap() directly within the Branch() functions.
        current = it.Current();
        it.Advance();
        if (it.Done()) {
          __ Branch(miss_label, ne, scratch1, Operand(current));
          break;
        }
        __ Branch(&do_store, eq, scratch1, Operand(current));
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index.is_inobject()) {
      __ lw(scratch1, FieldMemOperand(receiver_reg, index.offset()));
    } else {
      __ lw(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      __ lw(scratch1, FieldMemOperand(scratch1, index.offset()));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
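    // The value is a smi: untag it and convert the integer to a double in f4.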
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register v0).
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index.is_inobject()) {
    // Set the property straight into the object.
    __ sw(value_reg, FieldMemOperand(receiver_reg, index.offset()));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          index.offset(),
                          name_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(value_reg, FieldMemOperand(scratch1, index.offset()));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          index.offset(),
                          name_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate a call to an API function.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(!receiver.is(scratch_in));
  // Prepare to push: adjust sp.
  __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
  __ sw(receiver, MemOperand(sp, argc * kPointerSize));  // Push receiver.
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc-1-i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize));  // Push arg.
  }
  ASSERT(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = a0;
  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ li(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ li(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ li(call_data, api_call_info);
    __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ li(call_data, call_data_obj);
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) {
    current = Handle<JSObject>::cast(type->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ li(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3();
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
    __ Branch(&miss, ne, scratch2(), Operand(callback));
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         FieldIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(isolate(), field);
    GenerateTailCall(masm(), stub.GetCode());
  } else {
    KeyedLoadFieldStub stub(isolate(), field);
    GenerateTailCall(masm(), stub.GetCode());
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them and store pointers
  // to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ lw(scratch3(), FieldMemOperand(scratch3(),
                                      ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
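  // Reserve six slots and fill them so that, relative to sp after the stores:
  // [0] name, [1] holder (reg), [2] isolate, [3] return value default,
  // [4] return value, [5] data; the receiver sits above them at [6].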
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
  __ Addu(scratch2(), sp, 1 * kPointerSize);

  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  // Abi for CallApiGetter.
  Register getter_address_reg = a2;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only those; other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor.  Note: map checks from the receiver to the
      // interceptor's holder have been compiled before (see a caller of this
      // method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }
    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ Push(receiver(), holder_reg);  // Receiver.
  __ li(at, Operand(callback));  // Callback info.
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register StoreStubCompiler::value() {
  return a0;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { a2, a1, a3, t0, t1 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(receiver,
                FieldMemOperand(
                    receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Branch(&miss, ne, this->name(), Operand(name));
  }

  Label number_case;
  Register match = scratch1();
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target, match);  // Reg match is 0 if Smi.

  Register map_reg = scratch2();

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match.
      // Separate compare from branch, to provide path for above JumpIfSmi().
      __ Subu(match, map_reg, Operand(map));
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, match, Operand(zero_reg));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver(), value());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
          scratch1(), Operand(receiver_maps->at(i)));
    } else {
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
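  // Untag the smi key into a2; LoadFromNumberDictionary is expected to take
  // the untagged key in its first scratch register.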
  __ sra(a2, a0, kSmiTagSize);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss);

  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS