stub-cache-mips.cc revision 85b71799222b55eb5dd74ea26efe0c64ab655c8c
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


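// Probe one table of the stub cache. On a hit (the cached key matches
// 'name' and the cached code object's flags match 'flags'), control jumps
// directly into the cached code object, clobbering 'offset'. On a miss,
// execution falls through past the label bound at the end of this function.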
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ li(offsets_base_addr, Operand(key_offset));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));
  __ Branch(&miss, ne, name, Operand(scratch2));

  // Get the code entry from the cache.
  __ Addu(offsets_base_addr, offsets_base_addr,
          Operand(value_off_addr - key_off_addr));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));

  // Check that the flags match what we're looking for.
  __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, scratch2, Operand(flags));

  // Re-load code entry from cache.
  __ sll(offset, offset, 1);
  __ addu(offset, offset, offsets_base_addr);
  __ lw(offset, MemOperand(offset));

  // Jump to the first instruction in the code stub.
  __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
MUST_USE_RESULT static MaybeObject* GenerateDictionaryNegativeLookup(
    MacroAssembler* masm,
    Label* miss_label,
    Register receiver,
    String* name,
    Register scratch0,
    Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(at, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, at, Operand(zero_reg));

  // Check that the receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load the properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  MaybeObject* result = StringDictionaryLookupStub::GenerateNegativeLookup(
      masm,
      miss_label,
      &done,
      receiver,
      properties,
      name,
      scratch1);
  if (result->IsFailure()) return result;

  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  return result;
}


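// Probe the primary and then the secondary table of the stub cache for a
// code stub matching the receiver's map, the name and the given flags.
// If both probes miss, execution falls through to the 'miss' label bound
// below so the caller can handle the lookup in the runtime.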
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss, t0);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, Operand(t8));
  __ Xor(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: compute the hash for the secondary probe.
  __ Subu(scratch, scratch, Operand(name));
  __ Addu(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ lw(prototype,
         FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check that we're still in the same context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly, otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ lw(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ lw(dst, FieldMemOperand(dst, offset));
  }
}


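// Generate code to load the length of a JS array into v0 and return.
// Receivers that are not JS arrays jump to miss_label.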
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ And(scratch, receiver, Operand(kSmiTagMask));
  __ Branch(miss_label, eq, scratch, Operand(zero_reg));

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string, leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

    // Unwrap the value and check if the wrapped value is a string.
    __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


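// Generate code to load the prototype of a function object into v0 and
// return; receivers without a usable function prototype jump to miss_label.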
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(v0, scratch1);
  __ Ret();
}


// Generate StoreField code; the value is passed in the a0 register.
// After executing the generated code, receiver_reg and name_reg
// may be clobbered.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // a0 : value.
  Label exit;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label, scratch);

  // Check that the map of the receiver hasn't changed.
  __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  __ Branch(miss_label, ne, scratch, Operand(Handle<Map>(object->map())));

  // Perform a global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stubs are never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform a map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(Handle<Map>(transition)));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ li(t0, Operand(Handle<Map>(transition)));
    __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(a0, FieldMemOperand(receiver_reg, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit, scratch);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ RecordWrite(receiver_reg, Operand(offset), name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(a0, FieldMemOperand(scratch, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ RecordWrite(scratch, Operand(offset), name_reg, receiver_reg);
  }

  // Return the value (register v0).
  __ bind(&exit);
  __ mov(v0, a0);
  __ Ret();
}


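// Tail call the miss builtin that matches the given IC kind (load or
// keyed load).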
void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Code* code = NULL;
  if (kind == Code::LOAD_IC) {
    code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
  } else {
    code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
  }

  Handle<Code> ic(code);
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


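// Check that a1 holds an actual JSFunction, patch the receiver on the
// stack with the global proxy when calling through a global object, and
// tail-call the function.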
static void GenerateCallFunction(MacroAssembler* masm,
                                 Object* object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- a0: receiver
  //  -- a1: function to call
  // -----------------------------------
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);
  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


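// Push the five arguments used by the interceptor runtime entries:
// the property name, the interceptor info, the receiver, the holder and
// the interceptor's data object. Note that 'name' is reused as a scratch
// register and is clobbered.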
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  Register scratch = name;
  __ li(scratch, Operand(Handle<Object>(interceptor)));
  __ Push(scratch, receiver, holder);
  __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}


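// Call the runtime entry that invokes only the named interceptor getter
// (no follow-up lookup); the result is returned in v0.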
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
          masm->isolate());
  __ li(a0, Operand(5));
  __ li(a1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


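// Number of extra words pushed below the JS arguments for a fast API
// call: the holder, the callee JS function and the call data (see the
// state layout in GenerateFastApiDirectCall below).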
static const int kFastApiCallArguments = 3;


// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  ASSERT(Smi::FromInt(0) == 0);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(zero_reg);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee js function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last js argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first js argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and set up the context.
  JSFunction* function = optimization.constant_function();
  __ li(t1, Operand(Handle<JSFunction>(function)));
  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    __ li(a0, api_call_info_handle);
    __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
  } else {
    __ li(t2, Operand(Handle<Object>(call_data)));
  }

  // Store the js function and the call data.
  __ sw(t1, MemOperand(sp, 1 * kPointerSize));
  __ sw(t2, MemOperand(sp, 2 * kPointerSize));

  // a2 points to the call data as expected by Arguments
  // (refer to the layout above).
  __ Addu(a2, sp, Operand(2 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

  const int kApiStackSpace = 4;

  __ EnterExitFrame(false, kApiStackSpace);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  // a1 = v8::Arguments&
  // Arguments is built one word above sp (sp is a reserved spot for ra).
  __ Addu(a1, sp, kPointerSize);

  // v8::Arguments::implicit_args = data
  __ sw(a2, MemOperand(a1, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ Addu(t0, a2, Operand(argc * kPointerSize));
  __ sw(t0, MemOperand(a1, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ li(t0, Operand(argc));
  __ sw(t0, MemOperand(a1, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_API_CALL,
                        masm->isolate());
  return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}


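// Compiles call stubs for properties that are backed by a named
// interceptor. Calls that resolve to a cached constant function are
// compiled via CompileCacheable (optionally using the fast API call
// path); everything else goes through a regular runtime call in
// CompileRegular.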
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();
    }
  }

 private:
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    Counters* counters = masm->isolate()->counters();

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from the receiver to the interceptor's holder
    // haven't changed and thus we can invoke the interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use the cached
    // constant function.

    // Check that the maps from the interceptor's holder to the constant
    // function's holder haven't changed and thus we can use the cached
    // constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (an object which is 'instanceof' for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke the function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiDirectCall(masm,
                                                      optimization,
                                                      arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION, call_kind);
    }

    // Deferred code for the fast API call case - clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }

    return masm->isolate()->heap()->undefined_value();
  }

  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    // Call a runtime function to load the interceptor property.
    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
          ExternalReference(
              IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
              masm->isolate()),
          5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();

    __ Push(holder, name_);

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);  // Restore the name.
    __ pop(receiver);  // Restore the holder.
    __ LeaveInternalFrame();

    // If the interceptor returned the no-result sentinel, fall through so
    // the caller can use the cached constant function; otherwise branch to
    // |interceptor_succeeded| with the interceptor's value in v0.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(Handle<Object>(cell)));
  __ lw(scratch,
        FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
  return cell;
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
    MacroAssembler* masm,
    JSObject* object,
    JSObject* holder,
    String* name,
    Register scratch,
    Label* miss) {
  JSObject* current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Returns a cell or a failure.
      MaybeObject* result = GenerateCheckPropertyCell(
          masm,
          GlobalObject::cast(current),
          name,
          scratch,
          miss);
      if (result->IsFailure()) return result;
    }
    ASSERT(current->IsJSObject());
    current = JSObject::cast(current->GetPrototype());
  }
  return NULL;
}


// Convert and store an int passed in register ival to an IEEE 754 single
// precision floating point value at memory location (dst + 4 * wordoffset).
// If FPU is available, use it for the conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    __ mtc1(ival, f0);
    __ cvt_s_w(f0, f0);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ swc1(f0, MemOperand(scratch1, 0));
  } else {
    // FPU is not available, do a manual conversion.

    Label not_special, done;
    // Move the sign bit from the source to the destination. This works
    // because the sign bit of a binary32 value is in the same position and
    // has the same polarity as the 2's complement sign bit of a 32-bit
    // integer.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ And(fval, ival, Operand(kBinary32SignMask));
    // Negate the value if it is negative.
    __ subu(scratch1, zero_reg, ival);
    __ movn(ival, scratch1, fval);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // the absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ Branch(&not_special, gt, ival, Operand(1));

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ Xor(scratch1, ival, Operand(1));
    __ li(scratch2, exponent_word_for_1);
    __ or_(scratch2, fval, scratch2);
    __ movz(fval, scratch2, scratch1);  // Only if ival is equal to 1.
    __ Branch(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ clz(zeros, ival);

    // Compute the exponent and or it into the exponent register.
    __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
    __ subu(scratch1, scratch1, zeros);

    __ sll(scratch1, scratch1, kBinary32ExponentShift);
    __ or_(fval, fval, scratch1);

    // Shift up the source, chopping the top bit off.
    __ Addu(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32 which means 0.
    __ sllv(ival, ival, zeros);
    // Or in the mantissa (the top kBinary32MantissaBits bits of ival).
    __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
    __ or_(fval, fval, scratch1);

    __ bind(&done);

    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ sw(fval, MemOperand(scratch1, 0));
  }
}


// Convert an unsigned integer with a specified number of leading zeroes in
// its binary representation to an IEEE 754 double.
// The integer to convert is passed in register hiword.
// The resulting double is returned in registers hiword:loword.
// This function does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
  if (mantissa_shift_for_hi_word > 0) {
    __ sll(loword, hiword, mantissa_shift_for_lo_word);
    __ srl(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  } else {
    __ mov(loword, zero_reg);
    __ sll(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  }

  // If the least significant bit of the biased exponent was not 1, it was
  // corrupted by the most significant bit of the mantissa, so we should
  // fix that.
  if (!(biased_exponent & 1)) {
    __ li(scratch, 1 << HeapNumber::kExponentShift);
    __ nor(scratch, scratch, scratch);
    __ and_(hiword, hiword, scratch);
  }
}


#undef __
#define __ ACCESS_MASM(masm())


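// Generate code to check that the maps of all objects on the prototype
// chain between 'object' and 'holder' are unchanged, jumping to 'miss'
// otherwise. On success, returns the register that holds the holder.
// When save_at_depth equals the depth of an object on the chain, that
// object is also stored at MemOperand(sp) (used by the fast API call
// machinery). For slow-mode objects on the chain, a dictionary negative
// lookup is emitted instead of a map check.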
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ sw(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    ASSERT(current->GetPrototype()->IsJSObject());
    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
        Object* lookup_result = NULL;  // Initialization to please compiler.
        if (!maybe_lookup_result->ToObject(&lookup_result)) {
          set_failure(Failure::cast(maybe_lookup_result));
          return reg;
        }
        name = String::cast(lookup_result);
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      MaybeObject* negative_lookup = GenerateDictionaryNegativeLookup(masm(),
                                                                      miss,
                                                                      reg,
                                                                      name,
                                                                      scratch1,
                                                                      scratch2);
      if (negative_lookup->IsFailure()) {
        set_failure(Failure::cast(negative_lookup));
        return reg;
      }

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));

      // Branch on the result of the map check.
      __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));

      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
        // Restore the scratch register to be the map of the object.  In the
        // new space case below, we load the prototype from the map in
        // the scratch register.
        __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      // Check the map of the current object.
      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      // Branch on the result of the map check.
      __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ li(reg, Operand(Handle<JSObject>(prototype)));
    }

    if (save_at_depth == depth) {
      __ sw(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
  // Perform a security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  MaybeObject* result = GenerateCheckPropertyCells(masm(),
                                                   object,
                                                   holder,
                                                   name,
                                                   scratch1,
                                                   miss);
  if (result->IsFailure()) set_failure(Failure::cast(result));

  // Return the register containing the holder.
  return reg;
}


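// Generate a load of a fast-mode field: verify the prototype chain, then
// load the property at 'index' out of the holder into v0 and return.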
void StubCompiler::GenerateLoadField(JSObject* object,
                                     JSObject* holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     String* name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ And(scratch1, receiver, Operand(kSmiTagMask));
  __ Branch(miss, eq, scratch1, Operand(zero_reg));

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);
  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
  __ Ret();
}


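// Generate a load of a constant property: verify the prototype chain,
// then return the known constant value in v0.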
void StubCompiler::GenerateLoadConstant(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Object* value,
                                        String* name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ li(v0, Operand(Handle<Object>(value)));
  __ Ret();
}


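// Generate a load through an AccessorInfo getter: verify the prototype
// chain, build the AccessorInfo::args_ list on the stack, and call the
// C++ getter directly through an exit frame.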
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3,
                      name, miss);

  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them and store
  // pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  Handle<AccessorInfo> callback_handle(callback);
  if (heap()->InNewSpace(callback_handle->data())) {
    __ li(scratch3, callback_handle);
    __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ li(scratch3, Handle<Object>(callback_handle->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(a2, scratch2);  // Saved in case scratch2 == a1.
  __ mov(a1, sp);  // a1 (first argument - see note below) = Handle<String>

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  const int kApiStackSpace = 1;

  __ EnterExitFrame(false, kApiStackSpace);
  // Create an AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object** args_) as the data.
  __ sw(a2, MemOperand(sp, kPointerSize));
  // a2 (second argument - see note above) = AccessorInfo&
  __ Addu(a2, sp, kPointerSize);

  // Emitting a stub call may try to allocate (if the code is not
  // already generated).  Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  // 4 args - will be freed later by LeaveExitFrame.
  return masm()->TryCallApiFunctionAndReturn(ref, 4);
}


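// Generate a load of a property backed by a named interceptor. For the
// common FIELD and CALLBACKS follow-ups, the post-interceptor load is
// compiled inline; all other cases are delegated to the runtime.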
1279void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1280                                           JSObject* interceptor_holder,
1281                                           LookupResult* lookup,
1282                                           Register receiver,
1283                                           Register name_reg,
1284                                           Register scratch1,
1285                                           Register scratch2,
1286                                           Register scratch3,
1287                                           String* name,
1288                                           Label* miss) {
1289  ASSERT(interceptor_holder->HasNamedInterceptor());
1290  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1291
1292  // Check that the receiver isn't a smi.
1293  __ JumpIfSmi(receiver, miss);
1294
1295  // So far the most popular follow ups for interceptor loads are FIELD
1296  // and CALLBACKS, so inline only them, other cases may be added
1297  // later.
1298  bool compile_followup_inline = false;
1299  if (lookup->IsProperty() && lookup->IsCacheable()) {
1300    if (lookup->type() == FIELD) {
1301      compile_followup_inline = true;
1302    } else if (lookup->type() == CALLBACKS &&
1303        lookup->GetCallbackObject()->IsAccessorInfo() &&
1304        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1305      compile_followup_inline = true;
1306    }
1307  }
1308
1309  if (compile_followup_inline) {
1310    // Compile the interceptor call, followed by inline code to load the
1311    // property from further up the prototype chain if the call fails.
1312    // Check that the maps haven't changed.
1313    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1314                                          scratch1, scratch2, scratch3,
1315                                          name, miss);
1316    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1317
1318    // Save necessary data before invoking an interceptor.
1319    // Requires a frame to make GC aware of pushed pointers.
1320    __ EnterInternalFrame();
1321
1322    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1323      // CALLBACKS case needs a receiver to be passed into C++ callback.
1324      __ Push(receiver, holder_reg, name_reg);
1325    } else {
1326      __ Push(holder_reg, name_reg);
1327    }
1328
1329    // Invoke an interceptor.  Note: map checks from receiver to
1330    // interceptor's holder has been compiled before (see a caller
1331    // of this method).
1332    CompileCallLoadPropertyWithInterceptor(masm(),
1333                                           receiver,
1334                                           holder_reg,
1335                                           name_reg,
1336                                           interceptor_holder);
1337
1338    // Check if interceptor provided a value for property.  If it's
1339    // the case, return immediately.
1340    Label interceptor_failed;
1341    __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1342    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1343    __ LeaveInternalFrame();
1344    __ Ret();
1345
1346    __ bind(&interceptor_failed);
1347    __ pop(name_reg);
1348    __ pop(holder_reg);
1349    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1350      __ pop(receiver);
1351    }
1352
1353    __ LeaveInternalFrame();
1354
1355    // Check that the maps from the interceptor's holder to the lookup's
1356    // holder haven't changed, and load the lookup's holder into holder_reg.
1357    if (interceptor_holder != lookup->holder()) {
1358      holder_reg = CheckPrototypes(interceptor_holder,
1359                                   holder_reg,
1360                                   lookup->holder(),
1361                                   scratch1,
1362                                   scratch2,
1363                                   scratch3,
1364                                   name,
1365                                   miss);
1366    }
1367
1368    if (lookup->type() == FIELD) {
1369      // We found a FIELD property in the prototype chain of the
1370      // interceptor's holder.  Retrieve the field from the field's holder.
1371      GenerateFastPropertyLoad(masm(), v0, holder_reg,
1372                               lookup->holder(), lookup->GetFieldIndex());
1373      __ Ret();
1374    } else {
1375      // We found a CALLBACKS property in the prototype chain of the
1376      // interceptor's holder.
1377      ASSERT(lookup->type() == CALLBACKS);
1378      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1379      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1380      ASSERT(callback != NULL);
1381      ASSERT(callback->getter() != NULL);
1382
1383      // Tail call to runtime.
1384      // Important invariant in CALLBACKS case: the code above must be
1385      // structured to never clobber |receiver| register.
1386      __ li(scratch2, Handle<AccessorInfo>(callback));
1387      // holder_reg is either receiver or scratch1.
1388      if (!receiver.is(holder_reg)) {
1389        ASSERT(scratch1.is(holder_reg));
1390        __ Push(receiver, holder_reg);
1391        __ lw(scratch3,
1392              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1393        __ Push(scratch3, scratch2, name_reg);
1394      } else {
1395        __ push(receiver);
1396        __ lw(scratch3,
1397              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1398        __ Push(holder_reg, scratch3, scratch2, name_reg);
1399      }
1400
1401      ExternalReference ref =
1402          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1403                            masm()->isolate());
1404      __ TailCallExternalReference(ref, 5, 1);
1405    }
1406  } else {  // !compile_followup_inline
1407    // Call the runtime system to load the interceptor.
1408    // Check that the maps haven't changed.
1409    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1410                                          scratch1, scratch2, scratch3,
1411                                          name, miss);
1412    PushInterceptorArguments(masm(), receiver, holder_reg,
1413                             name_reg, interceptor_holder);
1414
1415    ExternalReference ref = ExternalReference(
1416        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
1417    __ TailCallExternalReference(ref, 5, 1);
1418  }
1419}
1420
1421
1422void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
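  // Keyed call ICs receive the property name in a2 at runtime, so it must be
  // checked explicitly; other call IC kinds are specialized on the name when
  // the stub is compiled and need no runtime check.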
1423  if (kind_ == Code::KEYED_CALL_IC) {
1424    __ Branch(miss, ne, a2, Operand(Handle<String>(name)));
1425  }
1426}
1427
1428
1429void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1430                                                   JSObject* holder,
1431                                                   String* name,
1432                                                   Label* miss) {
1433  ASSERT(holder->IsGlobalObject());
1434
1435  // Get the number of arguments.
1436  const int argc = arguments().immediate();
1437
1438  // Get the receiver from the stack.
1439  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1440
1441  // If the object is the holder then we know that it's a global
1442  // object which can only happen for contextual calls. In this case,
1443  // the receiver cannot be a smi.
1444  if (object != holder) {
1445    __ JumpIfSmi(a0, miss);
1446  }
1447
1448  // Check that the maps haven't changed.
1449  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
1450}
1451
1452
1453void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1454                                                    JSFunction* function,
1455                                                    Label* miss) {
1456  // Get the value from the cell.
1457  __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
1458  __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1459
1460  // Check that the cell contains the same function.
1461  if (heap()->InNewSpace(function)) {
1462    // We can't embed a pointer to a function in new space so we have
1463    // to verify that the shared function info is unchanged. This has
1464    // the nice side effect that multiple closures based on the same
1465    // function can all use this call IC. Before we load through the
1466    // function, we have to verify that it is still a function.
1467    __ JumpIfSmi(a1, miss);
1468    __ GetObjectType(a1, a3, a3);
1469    __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1470
1471    // Check the shared function info. Make sure it hasn't changed.
1472    __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1473    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1474    __ Branch(miss, ne, t0, Operand(a3));
1475  } else {
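    // The function is in old space, so its pointer can be embedded in the
    // code and compared against directly.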
1476    __ Branch(miss, ne, a1, Operand(Handle<JSFunction>(function)));
1477  }
1478}
1479
1480
1481MaybeObject* CallStubCompiler::GenerateMissBranch() {
1482  MaybeObject* maybe_obj =
1483      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1484                                               kind_,
1485                                               extra_ic_state_);
1486  Object* obj;
1487  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1488  __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1489  return obj;
1490}
1491
1492
1493MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1494                                                JSObject* holder,
1495                                                int index,
1496                                                String* name) {
1497  // ----------- S t a t e -------------
1498  //  -- a2    : name
1499  //  -- ra    : return address
1500  // -----------------------------------
1501  Label miss;
1502
1503  GenerateNameCheck(name, &miss);
1504
1505  const int argc = arguments().immediate();
1506
1507  // Get the receiver of the function from the stack into a0.
1508  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1509  // Check that the receiver isn't a smi.
1510  __ JumpIfSmi(a0, &miss, t0);
1511
1512  // Check the prototype chain and compute the holder register.
1513  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1514  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1515
1516  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
1517
1518  // Handle call cache miss.
1519  __ bind(&miss);
1520  MaybeObject* maybe_result = GenerateMissBranch();
1521  if (maybe_result->IsFailure()) return maybe_result;
1522
1523  // Return the generated code.
1524  return GetCode(FIELD, name);
1525}
1526
1527
1528MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1529                                                    JSObject* holder,
1530                                                    JSGlobalPropertyCell* cell,
1531                                                    JSFunction* function,
1532                                                    String* name) {
1533  // ----------- S t a t e -------------
1534  //  -- a2    : name
1535  //  -- ra    : return address
1536  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1537  //  -- ...
1538  //  -- sp[argc * 4]           : receiver
1539  // -----------------------------------
1540
1541  // If object is not an array, bail out to regular call.
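  // A non-NULL cell means the function was found on a global object; this
  // custom stub only optimizes direct calls on an array receiver.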
1542  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1543
1544  Label miss;
1545
1546  GenerateNameCheck(name, &miss);
1547
1548  Register receiver = a1;
1549
1550  // Get the receiver from the stack.
1551  const int argc = arguments().immediate();
1552  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1553
1554  // Check that the receiver isn't a smi.
1555  __ JumpIfSmi(receiver, &miss);
1556
1557  // Check that the maps haven't changed.
1558  CheckPrototypes(JSObject::cast(object), receiver,
1559                  holder, a3, v0, t0, name, &miss);
1560
1561  if (argc == 0) {
1562    // Nothing to do, just return the length.
1563    __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1564    __ Drop(argc + 1);
1565    __ Ret();
1566  } else {
1567    Label call_builtin;
1568
1569    Register elements = a3;
1570    Register end_elements = t1;
1571
1572    // Get the elements array of the object.
1573    __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1574
1575    // Check that the elements are in fast mode and writable.
1576    __ CheckMap(elements,
1577                v0,
1578                Heap::kFixedArrayMapRootIndex,
1579                &call_builtin,
1580                DONT_DO_SMI_CHECK);
1581
1582    if (argc == 1) {  // Otherwise fall through to call the builtin.
1583      Label exit, with_write_barrier, attempt_to_grow_elements;
1584
1585      // Get the array's length into v0 and calculate new length.
1586      __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1587      STATIC_ASSERT(kSmiTagSize == 1);
1588      STATIC_ASSERT(kSmiTag == 0);
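      // Since the smi tag is 0, tagged smis add like plain integers, so the
      // length can be incremented without untagging it.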
1589      __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1590
1591      // Get the elements array's length.
1592      __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1593
1594      // Check if the new length fits within the current capacity.
1595      __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1596
1597      // Save new length.
1598      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1599
1600      // Push the element.
1601      __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1602      // We may need the address of the end of the elements array below,
1603      // so compute it in end_elements.
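      // A smi is the value shifted left by kSmiTagSize (1), so shifting the
      // tagged length left by kPointerSizeLog2 - kSmiTagSize (here 2 - 1 = 1)
      // turns it directly into a byte offset into the elements array.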
1604      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1605      __ Addu(end_elements, elements, end_elements);
1606      const int kEndElementsOffset =
1607          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
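      // end_elements is based on the new (post-push) length, so subtracting
      // argc * kPointerSize and adding the untagged header size yields the
      // address of the first slot being filled.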
1608      __ sw(t0, MemOperand(end_elements, kEndElementsOffset));
1609      __ Addu(end_elements, end_elements, kPointerSize);
1610
1611      // Check for a smi.
1612      __ JumpIfNotSmi(t0, &with_write_barrier);
1613      __ bind(&exit);
1614      __ Drop(argc + 1);
1615      __ Ret();
1616
1617      __ bind(&with_write_barrier);
1618      __ InNewSpace(elements, t0, eq, &exit);
1619      __ RecordWriteHelper(elements, end_elements, t0);
1620      __ Drop(argc + 1);
1621      __ Ret();
1622
1623      __ bind(&attempt_to_grow_elements);
1624      // v0: array's length + 1.
1625      // t0: elements' length.
1626
1627      if (!FLAG_inline_new) {
1628        __ Branch(&call_builtin);
1629      }
1630
1631      ExternalReference new_space_allocation_top =
1632          ExternalReference::new_space_allocation_top_address(
1633              masm()->isolate());
1634      ExternalReference new_space_allocation_limit =
1635          ExternalReference::new_space_allocation_limit_address(
1636              masm()->isolate());
1637
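      // Growth strategy: if the elements array is the most recently
      // allocated object in new space (its end coincides with the current
      // allocation top), grow it in place by bumping the allocation top.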
1638      const int kAllocationDelta = 4;
1639      // Load top and check if it is the end of elements.
1640      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1641      __ Addu(end_elements, elements, end_elements);
1642      __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1643      __ li(t3, Operand(new_space_allocation_top));
1644      __ lw(t2, MemOperand(t3));
1645      __ Branch(&call_builtin, ne, end_elements, Operand(t2));
1646
1647      __ li(t5, Operand(new_space_allocation_limit));
1648      __ lw(t5, MemOperand(t5));
1649      __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
1650      __ Branch(&call_builtin, hi, t2, Operand(t5));
1651
1652      // We fit and could grow elements.
1653      // Update new_space_allocation_top.
1654      __ sw(t2, MemOperand(t3));
1655      // Push the argument.
1656      __ lw(t2, MemOperand(sp, (argc - 1) * kPointerSize));
1657      __ sw(t2, MemOperand(end_elements));
1658      // Fill the rest with holes.
1659      __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1660      for (int i = 1; i < kAllocationDelta; i++) {
1661        __ sw(t2, MemOperand(end_elements, i * kPointerSize));
1662      }
1663
1664      // Update elements' and array's sizes.
1665      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1666      __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1667      __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1668
1669      // Elements are in new space, so write barrier is not required.
1670      __ Drop(argc + 1);
1671      __ Ret();
1672    }
1673    __ bind(&call_builtin);
1674    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1675                                                   masm()->isolate()),
1676                                 argc + 1,
1677                                 1);
1678  }
1679
1680  // Handle call cache miss.
1681  __ bind(&miss);
1682  MaybeObject* maybe_result = GenerateMissBranch();
1683  if (maybe_result->IsFailure()) return maybe_result;
1684
1685  // Return the generated code.
1686  return GetCode(function);
1687}
1688
1689
1690MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1691                                                   JSObject* holder,
1692                                                   JSGlobalPropertyCell* cell,
1693                                                   JSFunction* function,
1694                                                   String* name) {
1695  // ----------- S t a t e -------------
1696  //  -- a2    : name
1697  //  -- ra    : return address
1698  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1699  //  -- ...
1700  //  -- sp[argc * 4]           : receiver
1701  // -----------------------------------
1702
1703  // If object is not an array, bail out to regular call.
1704  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();
1705
1706  Label miss, return_undefined, call_builtin;
1707
1708  Register receiver = a1;
1709  Register elements = a3;
1710
1711  GenerateNameCheck(name, &miss);
1712
1713  // Get the receiver from the stack.
1714  const int argc = arguments().immediate();
1715  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1716
1717  // Check that the receiver isn't a smi.
1718  __ JumpIfSmi(receiver, &miss);
1719
1720  // Check that the maps haven't changed.
1721  CheckPrototypes(JSObject::cast(object),
1722                  receiver, holder, elements, t0, v0, name, &miss);
1723
1724  // Get the elements array of the object.
1725  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1726
1727  // Check that the elements are in fast mode and writable.
1728  __ CheckMap(elements,
1729              v0,
1730              Heap::kFixedArrayMapRootIndex,
1731              &call_builtin,
1732              DONT_DO_SMI_CHECK);
1733
1734  // Get the array's length into t0 and calculate new length.
1735  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1736  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1737  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1738
1739  // Get the last element.
1740  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1741  STATIC_ASSERT(kSmiTagSize == 1);
1742  STATIC_ASSERT(kSmiTag == 0);
1743  // We can't address the last element in one operation. Compute the more
1744  // expensive shift first, and use an offset later on.
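  // t0 holds the new length; the shift below converts it to a byte offset so
  // the popped element (at index new_length) can be loaded with a fixed
  // header offset.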
1745  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1746  __ Addu(elements, elements, t1);
1747  __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1748  __ Branch(&call_builtin, eq, v0, Operand(t2));
1749
1750  // Set the array's length.
1751  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1752
1753  // Fill with the hole.
1754  __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1755  __ Drop(argc + 1);
1756  __ Ret();
1757
1758  __ bind(&return_undefined);
1759  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1760  __ Drop(argc + 1);
1761  __ Ret();
1762
1763  __ bind(&call_builtin);
1764  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1765                                                 masm()->isolate()),
1766                               argc + 1,
1767                               1);
1768
1769  // Handle call cache miss.
1770  __ bind(&miss);
1771  MaybeObject* maybe_result = GenerateMissBranch();
1772  if (maybe_result->IsFailure()) return maybe_result;
1773
1774  // Return the generated code.
1775  return GetCode(function);
1776}
1777
1778
1779MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1780    Object* object,
1781    JSObject* holder,
1782    JSGlobalPropertyCell* cell,
1783    JSFunction* function,
1784    String* name) {
1785  // ----------- S t a t e -------------
1786  //  -- a2                     : function name
1787  //  -- ra                     : return address
1788  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1789  //  -- ...
1790  //  -- sp[argc * 4]           : receiver
1791  // -----------------------------------
1792
1793  // If object is not a string, bail out to regular call.
1794  if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1795
1796  const int argc = arguments().immediate();
1797
1798  Label miss;
1799  Label name_miss;
1800  Label index_out_of_range;
1801
1802  Label* index_out_of_range_label = &index_out_of_range;
1803
1804  if (kind_ == Code::CALL_IC &&
1805      (CallICBase::StringStubState::decode(extra_ic_state_) ==
1806       DEFAULT_STRING_STUB)) {
1807    index_out_of_range_label = &miss;
1808  }
1809
1810  GenerateNameCheck(name, &name_miss);
1811
1812  // Check that the maps starting from the prototype haven't changed.
1813  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1814                                            Context::STRING_FUNCTION_INDEX,
1815                                            v0,
1816                                            &miss);
1817  ASSERT(object != holder);
1818  CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1819                  a1, a3, t0, name, &miss);
1820
1821  Register receiver = a1;
1822  Register index = t1;
1823  Register scratch = a3;
1824  Register result = v0;
1825  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1826  if (argc > 0) {
1827    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1828  } else {
1829    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1830  }
1831
1832  StringCharCodeAtGenerator char_code_at_generator(receiver,
1833                                                   index,
1834                                                   scratch,
1835                                                   result,
1836                                                   &miss,  // When not a string.
1837                                                   &miss,  // When not a number.
1838                                                   index_out_of_range_label,
1839                                                   STRING_INDEX_IS_NUMBER);
1840  char_code_at_generator.GenerateFast(masm());
1841  __ Drop(argc + 1);
1842  __ Ret();
1843
1844  StubRuntimeCallHelper call_helper;
1845  char_code_at_generator.GenerateSlow(masm(), call_helper);
1846
1847  if (index_out_of_range.is_linked()) {
1848    __ bind(&index_out_of_range);
1849    __ LoadRoot(v0, Heap::kNanValueRootIndex);
1850    __ Drop(argc + 1);
1851    __ Ret();
1852  }
1853
1854  __ bind(&miss);
1855  // Restore function name in a2.
1856  __ li(a2, Handle<String>(name));
1857  __ bind(&name_miss);
1858  MaybeObject* maybe_result = GenerateMissBranch();
1859  if (maybe_result->IsFailure()) return maybe_result;
1860
1861  // Return the generated code.
1862  return GetCode(function);
1863}
1864
1865
1866MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1867    Object* object,
1868    JSObject* holder,
1869    JSGlobalPropertyCell* cell,
1870    JSFunction* function,
1871    String* name) {
1872  // ----------- S t a t e -------------
1873  //  -- a2                     : function name
1874  //  -- ra                     : return address
1875  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1876  //  -- ...
1877  //  -- sp[argc * 4]           : receiver
1878  // -----------------------------------
1879
1880  // If object is not a string, bail out to regular call.
1881  if (!object->IsString() || cell != NULL) return heap()->undefined_value();
1882
1883  const int argc = arguments().immediate();
1884
1885  Label miss;
1886  Label name_miss;
1887  Label index_out_of_range;
1888  Label* index_out_of_range_label = &index_out_of_range;
1889
1890  if (kind_ == Code::CALL_IC &&
1891      (CallICBase::StringStubState::decode(extra_ic_state_) ==
1892       DEFAULT_STRING_STUB)) {
1893    index_out_of_range_label = &miss;
1894  }
1895
1896  GenerateNameCheck(name, &name_miss);
1897
1898  // Check that the maps starting from the prototype haven't changed.
1899  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1900                                            Context::STRING_FUNCTION_INDEX,
1901                                            v0,
1902                                            &miss);
1903  ASSERT(object != holder);
1904  CheckPrototypes(JSObject::cast(object->GetPrototype()), v0, holder,
1905                  a1, a3, t0, name, &miss);
1906
1907  Register receiver = v0;
1908  Register index = t1;
1909  Register scratch1 = a1;
1910  Register scratch2 = a3;
1911  Register result = v0;
1912  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1913  if (argc > 0) {
1914    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1915  } else {
1916    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1917  }
1918
1919  StringCharAtGenerator char_at_generator(receiver,
1920                                          index,
1921                                          scratch1,
1922                                          scratch2,
1923                                          result,
1924                                          &miss,  // When not a string.
1925                                          &miss,  // When not a number.
1926                                          index_out_of_range_label,
1927                                          STRING_INDEX_IS_NUMBER);
1928  char_at_generator.GenerateFast(masm());
1929  __ Drop(argc + 1);
1930  __ Ret();
1931
1932  StubRuntimeCallHelper call_helper;
1933  char_at_generator.GenerateSlow(masm(), call_helper);
1934
1935  if (index_out_of_range.is_linked()) {
1936    __ bind(&index_out_of_range);
1937    __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1938    __ Drop(argc + 1);
1939    __ Ret();
1940  }
1941
1942  __ bind(&miss);
1943  // Restore function name in a2.
1944  __ li(a2, Handle<String>(name));
1945  __ bind(&name_miss);
1946  MaybeObject* maybe_result = GenerateMissBranch();
1947  if (maybe_result->IsFailure()) return maybe_result;
1948
1949  // Return the generated code.
1950  return GetCode(function);
1951}
1952
1953
1954MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
1955    Object* object,
1956    JSObject* holder,
1957    JSGlobalPropertyCell* cell,
1958    JSFunction* function,
1959    String* name) {
1960  // ----------- S t a t e -------------
1961  //  -- a2                     : function name
1962  //  -- ra                     : return address
1963  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1964  //  -- ...
1965  //  -- sp[argc * 4]           : receiver
1966  // -----------------------------------
1967
1968  const int argc = arguments().immediate();
1969
1970  // If the object is not a JSObject or we got an unexpected number of
1971  // arguments, bail out to the regular call.
1972  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
1973
1974  Label miss;
1975  GenerateNameCheck(name, &miss);
1976
1977  if (cell == NULL) {
1978    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1979
1980    STATIC_ASSERT(kSmiTag == 0);
1981    __ JumpIfSmi(a1, &miss);
1982
1983    CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
1984                    &miss);
1985  } else {
1986    ASSERT(cell->value() == function);
1987    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
1988    GenerateLoadFunctionFromCell(cell, function, &miss);
1989  }
1990
1991  // Load the char code argument.
1992  Register code = a1;
1993  __ lw(code, MemOperand(sp, 0 * kPointerSize));
1994
1995  // Check the code is a smi.
1996  Label slow;
1997  STATIC_ASSERT(kSmiTag == 0);
1998  __ JumpIfNotSmi(code, &slow);
1999
2000  // Convert the smi code to uint16.
2001  __ And(code, code, Operand(Smi::FromInt(0xffff)));
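  // Masking with the smi encoding of 0xffff truncates the char code to
  // 16 bits while keeping the value a valid smi.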
2002
2003  StringCharFromCodeGenerator char_from_code_generator(code, v0);
2004  char_from_code_generator.GenerateFast(masm());
2005  __ Drop(argc + 1);
2006  __ Ret();
2007
2008  StubRuntimeCallHelper call_helper;
2009  char_from_code_generator.GenerateSlow(masm(), call_helper);
2010
2011  // Tail call the full function. We do not have to patch the receiver
2012  // because the function makes no use of it.
2013  __ bind(&slow);
2014  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
2015
2016  __ bind(&miss);
2017  // a2: function name.
2018  MaybeObject* maybe_result = GenerateMissBranch();
2019  if (maybe_result->IsFailure()) return maybe_result;
2020
2021  // Return the generated code.
2022  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2023}
2024
2025
2026MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
2027                                                    JSObject* holder,
2028                                                    JSGlobalPropertyCell* cell,
2029                                                    JSFunction* function,
2030                                                    String* name) {
2031  // ----------- S t a t e -------------
2032  //  -- a2                     : function name
2033  //  -- ra                     : return address
2034  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2035  //  -- ...
2036  //  -- sp[argc * 4]           : receiver
2037  // -----------------------------------
2038
2039  if (!CpuFeatures::IsSupported(FPU)) return heap()->undefined_value();
2041  CpuFeatures::Scope scope_fpu(FPU);
2042
2043  const int argc = arguments().immediate();
2044
2045  // If the object is not a JSObject or we got an unexpected number of
2046  // arguments, bail out to the regular call.
2047  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2048
2049  Label miss, slow;
2050  GenerateNameCheck(name, &miss);
2051
2052  if (cell == NULL) {
2053    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2054
2055    STATIC_ASSERT(kSmiTag == 0);
2056    __ JumpIfSmi(a1, &miss);
2057
2058    CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2059                    &miss);
2060  } else {
2061    ASSERT(cell->value() == function);
2062    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2063    GenerateLoadFunctionFromCell(cell, function, &miss);
2064  }
2065
2066  // Load the (only) argument into v0.
2067  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2068
2069  // If the argument is a smi, just return.
2070  STATIC_ASSERT(kSmiTag == 0);
2071  __ And(t0, v0, Operand(kSmiTagMask));
2072  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
2073  __ Ret(eq, t0, Operand(zero_reg));
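  // Both Drop and Ret above are predicated on t0 == 0 (the smi check): a smi
  // argument pops the arguments and returns, anything else falls through.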
2074
2075  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2076
2077  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
2078
2079  // FPU is available (checked above), so use the floor instruction.
2080
2081  // Load the HeapNumber value.
2082  __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2083
2084  // Backup FCSR.
2085  __ cfc1(a3, FCSR);
2086  // Clearing FCSR clears the exception mask with no side-effects.
2087  __ ctc1(zero_reg, FCSR);
2088  // Convert the argument to an integer.
2089  __ floor_w_d(f0, f0);
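  // floor_w_d signals an FPU exception (recorded in FCSR) when the operand
  // is NaN, an infinity, or out of int32 range; those bits are checked below.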
2090
2091  // Start checking for special cases.
2092  // Get the argument exponent and clear the sign bit.
2093  __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2094  __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2095  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2096
2097  // Retrieve FCSR and check for fpu errors.
2098  __ cfc1(t5, FCSR);
2099  __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
2100  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2101
2102  // Check for NaN, Infinity, and -Infinity.
2103  // They are invariant under Math.floor, so just
2104  // return the original argument.
2105  __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2106        >> HeapNumber::kMantissaBitsInTopWord));
2107  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2108  // We had an overflow or underflow in the conversion. Check if we
2109  // have a big exponent.
2110  // If greater or equal, the argument is already round and in v0.
2111  __ Branch(&restore_fcsr_and_return, ge, t3,
2112      Operand(HeapNumber::kMantissaBits));
2113  __ Branch(&wont_fit_smi);
2114
2115  __ bind(&no_fpu_error);
2116  // Move the result back to v0.
2117  __ mfc1(v0, f0);
2118  // Check if the result fits into a smi.
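  // A value fits in a smi iff it is in [-2^30, 2^30); adding 0x40000000
  // makes exactly those values non-negative.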
2119  __ Addu(a1, v0, Operand(0x40000000));
2120  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
2121  // Tag the result.
2122  STATIC_ASSERT(kSmiTag == 0);
2123  __ sll(v0, v0, kSmiTagSize);
2124
2125  // Check for -0.
2126  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2127  // t1 already holds the HeapNumber exponent.
2128  __ And(t0, t1, Operand(HeapNumber::kSignMask));
2129  // If our HeapNumber is negative, the result was -0: reload the original
2130  // HeapNumber from the stack and return it. Else v0 holds 0; just return.
2131  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2132  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2133
2134  __ bind(&restore_fcsr_and_return);
2135  // Restore FCSR and return.
2136  __ ctc1(a3, FCSR);
2137
2138  __ Drop(argc + 1);
2139  __ Ret();
2140
2141  __ bind(&wont_fit_smi);
2142  // Restore FCSR and fall to slow case.
2143  __ ctc1(a3, FCSR);
2144
2145  __ bind(&slow);
2146  // Tail call the full function. We do not have to patch the receiver
2147  // because the function makes no use of it.
2148  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
2149
2150  __ bind(&miss);
2151  // a2: function name.
2152  MaybeObject* obj = GenerateMissBranch();
2153  if (obj->IsFailure()) return obj;
2154
2155  // Return the generated code.
2156  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2157}
2158
2159
2160MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
2161                                                  JSObject* holder,
2162                                                  JSGlobalPropertyCell* cell,
2163                                                  JSFunction* function,
2164                                                  String* name) {
2165  // ----------- S t a t e -------------
2166  //  -- a2                     : function name
2167  //  -- ra                     : return address
2168  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2169  //  -- ...
2170  //  -- sp[argc * 4]           : receiver
2171  // -----------------------------------
2172
2173  const int argc = arguments().immediate();
2174
2175  // If the object is not a JSObject or we got an unexpected number of
2176  // arguments, bail out to the regular call.
2177  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();
2178
2179  Label miss;
2180  GenerateNameCheck(name, &miss);
2181
2182  if (cell == NULL) {
2183    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2184
2185    STATIC_ASSERT(kSmiTag == 0);
2186    __ JumpIfSmi(a1, &miss);
2187
2188    CheckPrototypes(JSObject::cast(object), a1, holder, v0, a3, t0, name,
2189                    &miss);
2190  } else {
2191    ASSERT(cell->value() == function);
2192    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
2193    GenerateLoadFunctionFromCell(cell, function, &miss);
2194  }
2195
2196  // Load the (only) argument into v0.
2197  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2198
2199  // Check if the argument is a smi.
2200  Label not_smi;
2201  STATIC_ASSERT(kSmiTag == 0);
2202  __ JumpIfNotSmi(v0, &not_smi);
2203
2204  // Do bitwise not or do nothing depending on the sign of the
2205  // argument.
2206  __ sra(t0, v0, kBitsPerInt - 1);
2207  __ Xor(a1, v0, t0);
2208
2209  // Add 1 or do nothing depending on the sign of the argument.
2210  __ Subu(v0, a1, t0);
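  // Together: t0 = v0 >> 31 is 0 for non-negative input and -1 for negative,
  // so v0 = (v0 ^ t0) - t0 yields the absolute value without branching.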
2211
2212  // If the result is still negative, go to the slow case.
2213  // This only happens for the most negative smi.
2214  Label slow;
2215  __ Branch(&slow, lt, v0, Operand(zero_reg));
2216
2217  // Smi case done.
2218  __ Drop(argc + 1);
2219  __ Ret();
2220
2221  // Check if the argument is a heap number and load its exponent and
2222  // sign.
2223  __ bind(&not_smi);
2224  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2225  __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2226
2227  // Check the sign of the argument. If the argument is positive,
2228  // just return it.
2229  Label negative_sign;
2230  __ And(t0, a1, Operand(HeapNumber::kSignMask));
2231  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2232  __ Drop(argc + 1);
2233  __ Ret();
2234
2235  // If the argument is negative, clear the sign, and return a new
2236  // number.
2237  __ bind(&negative_sign);
2238  __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2239  __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2240  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2241  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2242  __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2243  __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2244  __ Drop(argc + 1);
2245  __ Ret();
2246
2247  // Tail call the full function. We do not have to patch the receiver
2248  // because the function makes no use of it.
2249  __ bind(&slow);
2250  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
2251
2252  __ bind(&miss);
2253  // a2: function name.
2254  MaybeObject* maybe_result = GenerateMissBranch();
2255  if (maybe_result->IsFailure()) return maybe_result;
2256
2257  // Return the generated code.
2258  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
2259}
2260
2261
2262MaybeObject* CallStubCompiler::CompileFastApiCall(
2263    const CallOptimization& optimization,
2264    Object* object,
2265    JSObject* holder,
2266    JSGlobalPropertyCell* cell,
2267    JSFunction* function,
2268    String* name) {
2269
2270  Counters* counters = isolate()->counters();
2271
2272  ASSERT(optimization.is_simple_api_call());
2273  // Bail out if object is a global object as we don't want to
2274  // repatch it to global receiver.
2275  if (object->IsGlobalObject()) return heap()->undefined_value();
2276  if (cell != NULL) return heap()->undefined_value();
2277  if (!object->IsJSObject()) return heap()->undefined_value();
2278  int depth = optimization.GetPrototypeDepthOfExpectedType(
2279            JSObject::cast(object), holder);
2280  if (depth == kInvalidProtoDepth) return heap()->undefined_value();
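  // kInvalidProtoDepth means map checks along the prototype chain cannot
  // guarantee that the receiver has the API call's expected type, so fall
  // back to a normal call.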
2281
2282  Label miss, miss_before_stack_reserved;
2283
2284  GenerateNameCheck(name, &miss_before_stack_reserved);
2285
2286  // Get the receiver from the stack.
2287  const int argc = arguments().immediate();
2288  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2289
2290  // Check that the receiver isn't a smi.
2291  __ JumpIfSmi(a1, &miss_before_stack_reserved);
2292
2293  __ IncrementCounter(counters->call_const(), 1, a0, a3);
2294  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2295
2296  ReserveSpaceForFastApiCall(masm(), a0);
2297
2298  // Check that the maps haven't changed; find the holder as a side effect.
2299  CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2300                  depth, &miss);
2301
2302  MaybeObject* result = GenerateFastApiDirectCall(masm(), optimization, argc);
2303  if (result->IsFailure()) return result;
2304
2305  __ bind(&miss);
2306  FreeSpaceForFastApiCall(masm());
2307
2308  __ bind(&miss_before_stack_reserved);
2309  MaybeObject* maybe_result = GenerateMissBranch();
2310  if (maybe_result->IsFailure()) return maybe_result;
2311
2312  // Return the generated code.
2313  return GetCode(function);
2314}
2315
2316
2317MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
2318                                                   JSObject* holder,
2319                                                   JSFunction* function,
2320                                                   String* name,
2321                                                   CheckType check) {
2322  // ----------- S t a t e -------------
2323  //  -- a2    : name
2324  //  -- ra    : return address
2325  // -----------------------------------
2326  if (HasCustomCallGenerator(function)) {
2327    MaybeObject* maybe_result = CompileCustomCall(
2328        object, holder, NULL, function, name);
2329    Object* result;
2330    if (!maybe_result->ToObject(&result)) return maybe_result;
2331    // Undefined means bail out to regular compiler.
2332    if (!result->IsUndefined()) return result;
2333  }
2334
2335  Label miss;
2336
2337  GenerateNameCheck(name, &miss);
2338
2339  // Get the receiver from the stack.
2340  const int argc = arguments().immediate();
2341  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2342
2343  // Check that the receiver isn't a smi.
2344  if (check != NUMBER_CHECK) {
2345    __ And(t1, a1, Operand(kSmiTagMask));
2346    __ Branch(&miss, eq, t1, Operand(zero_reg));
2347  }
2348
2349  // Make sure that it's okay not to patch the on-stack receiver
2350  // unless we're doing a receiver map check.
2351  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2352
2353  SharedFunctionInfo* function_info = function->shared();
2354  switch (check) {
2355    case RECEIVER_MAP_CHECK:
2356      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2357          1, a0, a3);
2358
2359      // Check that the maps haven't changed.
2360      CheckPrototypes(JSObject::cast(object), a1, holder, a0, a3, t0, name,
2361                      &miss);
2362
2363      // Patch the receiver on the stack with the global proxy if
2364      // necessary.
2365      if (object->IsGlobalObject()) {
2366        __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2367        __ sw(a3, MemOperand(sp, argc * kPointerSize));
2368      }
2369      break;
2370
2371    case STRING_CHECK:
2372      if (!function->IsBuiltin() && !function_info->strict_mode()) {
2373        // Calling non-strict non-builtins with a value as the receiver
2374        // requires boxing.
2375        __ jmp(&miss);
2376      } else {
2377        // Check that the object is a two-byte string or a symbol.
2378        __ GetObjectType(a1, a3, a3);
2379        __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2380        // Check that the maps starting from the prototype haven't changed.
2381        GenerateDirectLoadGlobalFunctionPrototype(
2382            masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2383        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2384                        a1, t0, name, &miss);
2385      }
2386      break;
2387
2388    case NUMBER_CHECK: {
2389      if (!function->IsBuiltin() && !function_info->strict_mode()) {
2390        // Calling non-strict non-builtins with a value as the receiver
2391        // requires boxing.
2392        __ jmp(&miss);
2393      } else {
2394        Label fast;
2395        // Check that the object is a smi or a heap number.
2396        __ And(t1, a1, Operand(kSmiTagMask));
2397        __ Branch(&fast, eq, t1, Operand(zero_reg));
2398        __ GetObjectType(a1, a0, a0);
2399        __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2400        __ bind(&fast);
2401        // Check that the maps starting from the prototype haven't changed.
2402        GenerateDirectLoadGlobalFunctionPrototype(
2403            masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2404        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2405                        a1, t0, name, &miss);
2406      }
2407      break;
2408    }
2409
2410    case BOOLEAN_CHECK: {
2411      if (!function->IsBuiltin() && !function_info->strict_mode()) {
2412        // Calling non-strict non-builtins with a value as the receiver
2413        // requires boxing.
2414        __ jmp(&miss);
2415      } else {
2416        Label fast;
2417        // Check that the object is a boolean.
2418        __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2419        __ Branch(&fast, eq, a1, Operand(t0));
2420        __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2421        __ Branch(&miss, ne, a1, Operand(t0));
2422        __ bind(&fast);
2423        // Check that the maps starting from the prototype haven't changed.
2424        GenerateDirectLoadGlobalFunctionPrototype(
2425            masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2426        CheckPrototypes(JSObject::cast(object->GetPrototype()), a0, holder, a3,
2427                        a1, t0, name, &miss);
2428      }
2429      break;
2430    }
2431
2432    default:
2433      UNREACHABLE();
2434  }
2435
2436  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
2437      ? CALL_AS_FUNCTION
2438      : CALL_AS_METHOD;
2439  __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);
2440
2441  // Handle call cache miss.
2442  __ bind(&miss);
2443
2444  MaybeObject* maybe_result = GenerateMissBranch();
2445  if (maybe_result->IsFailure()) return maybe_result;
2446
2447  // Return the generated code.
2448  return GetCode(function);
2449}
2450
2451
2452MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
2453                                                      JSObject* holder,
2454                                                      String* name) {
2455  // ----------- S t a t e -------------
2456  //  -- a2    : name
2457  //  -- ra    : return address
2458  // -----------------------------------
2459
2460  Label miss;
2461
2462  GenerateNameCheck(name, &miss);
2463
2464  // Get the number of arguments.
2465  const int argc = arguments().immediate();
2466
2467  LookupResult lookup;
2468  LookupPostInterceptor(holder, name, &lookup);
2469
2470  // Get the receiver from the stack.
2471  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2472
2473  CallInterceptorCompiler compiler(this, arguments(), a2, extra_ic_state_);
2474  MaybeObject* result = compiler.Compile(masm(),
2475                                         object,
2476                                         holder,
2477                                         name,
2478                                         &lookup,
2479                                         a1,
2480                                         a3,
2481                                         t0,
2482                                         a0,
2483                                         &miss);
2484  if (result->IsFailure()) {
2485    return result;
2486  }
2487
2488  // Move returned value, the function to call, to a1.
2489  __ mov(a1, v0);
2490  // Restore receiver.
2491  __ lw(a0, MemOperand(sp, argc * kPointerSize));
2492
2493  GenerateCallFunction(masm(), object, arguments(), &miss, extra_ic_state_);
2494
2495  // Handle call cache miss.
2496  __ bind(&miss);
2497  MaybeObject* maybe_result = GenerateMissBranch();
2498  if (maybe_result->IsFailure()) return maybe_result;
2499
2500  // Return the generated code.
2501  return GetCode(INTERCEPTOR, name);
2502}
2503
2504
2505MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
2506                                                 GlobalObject* holder,
2507                                                 JSGlobalPropertyCell* cell,
2508                                                 JSFunction* function,
2509                                                 String* name) {
2510  // ----------- S t a t e -------------
2511  //  -- a2    : name
2512  //  -- ra    : return address
2513  // -----------------------------------
2514
2515  if (HasCustomCallGenerator(function)) {
2516    MaybeObject* maybe_result = CompileCustomCall(
2517        object, holder, cell, function, name);
2518    Object* result;
2519    if (!maybe_result->ToObject(&result)) return maybe_result;
2520    // Undefined means bail out to regular compiler.
2521    if (!result->IsUndefined()) return result;
2522  }
2523
2524  Label miss;
2525
2526  GenerateNameCheck(name, &miss);
2527
2528  // Get the number of arguments.
2529  const int argc = arguments().immediate();
2530
2531  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2532  GenerateLoadFunctionFromCell(cell, function, &miss);
2533
2534  // Patch the receiver on the stack with the global proxy if
2535  // necessary.
2536  if (object->IsGlobalObject()) {
2537    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2538    __ sw(a3, MemOperand(sp, argc * kPointerSize));
2539  }
2540
2541  // Set up the context (the function is already in a1).
2542  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2543
2544  // Jump to the cached code (tail call).
2545  Counters* counters = masm()->isolate()->counters();
2546  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2547  ASSERT(function->is_compiled());
2548  Handle<Code> code(function->code());
2549  ParameterCount expected(function->shared()->formal_parameter_count());
2550  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
2551      ? CALL_AS_FUNCTION
2552      : CALL_AS_METHOD;
2553  if (V8::UseCrankshaft()) {
2554    UNIMPLEMENTED_MIPS();
2555  } else {
2556    __ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
2557                  JUMP_FUNCTION, call_kind);
2558  }
2559
2560  // Handle call cache miss.
2561  __ bind(&miss);
2562  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2563  MaybeObject* maybe_result = GenerateMissBranch();
2564  if (maybe_result->IsFailure()) return maybe_result;
2565
2566  // Return the generated code.
2567  return GetCode(NORMAL, name);
2568}
2569
2570
2571MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2572                                                  int index,
2573                                                  Map* transition,
2574                                                  String* name) {
2575  // ----------- S t a t e -------------
2576  //  -- a0    : value
2577  //  -- a1    : receiver
2578  //  -- a2    : name
2579  //  -- ra    : return address
2580  // -----------------------------------
2581  Label miss;
2582
2583  // Name register might be clobbered.
2584  GenerateStoreField(masm(),
2585                     object,
2586                     index,
2587                     transition,
2588                     a1, a2, a3,
2589                     &miss);
2590  __ bind(&miss);
2591  __ li(a2, Operand(Handle<String>(name)));  // Restore name.
2592  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2593  __ Jump(ic, RelocInfo::CODE_TARGET);
2594
2595  // Return the generated code.
2596  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2597}
2598
2599
2600MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2601                                                     AccessorInfo* callback,
2602                                                     String* name) {
2603  // ----------- S t a t e -------------
2604  //  -- a0    : value
2605  //  -- a1    : receiver
2606  //  -- a2    : name
2607  //  -- ra    : return address
2608  // -----------------------------------
2609  Label miss;
2610
2611  // Check that the object isn't a smi.
2612  __ JumpIfSmi(a1, &miss);
2613
2614  // Check that the map of the object hasn't changed.
2615  __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2616  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2617
2618  // Perform global security token check if needed.
2619  if (object->IsJSGlobalProxy()) {
2620    __ CheckAccessGlobalProxy(a1, a3, &miss);
2621  }
2622
2623  // Stub never generated for non-global objects that require access
2624  // checks.
2625  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2626
2627  __ push(a1);  // Receiver.
2628  __ li(a3, Operand(Handle<AccessorInfo>(callback)));  // Callback info.
2629  __ Push(a3, a2, a0);
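  // The stack now holds (receiver, callback info, name, value): the four
  // arguments expected by kStoreCallbackProperty.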
2630
2631  // Do tail-call to the runtime system.
2632  ExternalReference store_callback_property =
2633      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2634          masm()->isolate());
2635  __ TailCallExternalReference(store_callback_property, 4, 1);
2636
2637  // Handle store cache miss.
2638  __ bind(&miss);
2639  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2640  __ Jump(ic, RelocInfo::CODE_TARGET);
2641
2642  // Return the generated code.
2643  return GetCode(CALLBACKS, name);
2644}
2645
2646
2647MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2648                                                        String* name) {
2649  // ----------- S t a t e -------------
2650  //  -- a0    : value
2651  //  -- a1    : receiver
2652  //  -- a2    : name
2653  //  -- ra    : return address
2654  // -----------------------------------
2655  Label miss;
2656
2657  // Check that the object isn't a smi.
2658  __ JumpIfSmi(a1, &miss);
2659
2660  // Check that the map of the object hasn't changed.
2661  __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2662  __ Branch(&miss, ne, a3, Operand(Handle<Map>(receiver->map())));
2663
2664  // Perform global security token check if needed.
2665  if (receiver->IsJSGlobalProxy()) {
2666    __ CheckAccessGlobalProxy(a1, a3, &miss);
2667  }
2668
2669  // Stub is never generated for non-global objects that require access
2670  // checks.
2671  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2672
2673  __ Push(a1, a2, a0);  // Receiver, name, value.
2674
2675  __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2676  __ push(a0);  // Strict mode.
2677
2678  // Do tail-call to the runtime system.
2679  ExternalReference store_ic_property =
2680      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2681          masm()->isolate());
2682  __ TailCallExternalReference(store_ic_property, 4, 1);
2683
2684  // Handle store cache miss.
2685  __ bind(&miss);
2686  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2687  __ Jump(ic, RelocInfo::CODE_TARGET);
2688
2689  // Return the generated code.
2690  return GetCode(INTERCEPTOR, name);
2691}
2692
2693
2694MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2695                                                   JSGlobalPropertyCell* cell,
2696                                                   String* name) {
2697  // ----------- S t a t e -------------
2698  //  -- a0    : value
2699  //  -- a1    : receiver
2700  //  -- a2    : name
2701  //  -- ra    : return address
2702  // -----------------------------------
2703  Label miss;
2704
2705  // Check that the map of the global has not changed.
2706  __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2707  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2708
2709  // Check that the value in the cell is not the hole. If it is, this
2710  // cell could have been deleted, and reintroducing the global would
2711  // require updating the property details in the property dictionary of
2712  // the global object. We bail out to the runtime system to do that.
2713  __ li(t0, Operand(Handle<JSGlobalPropertyCell>(cell)));
2714  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2715  __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2716  __ Branch(&miss, eq, t1, Operand(t2));
2717
2718  // Store the value in the cell.
2719  __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2720  __ mov(v0, a0);  // Stored value must be returned in v0.
2721  Counters* counters = masm()->isolate()->counters();
2722  __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2723  __ Ret();
2724
2725  // Handle store cache miss.
2726  __ bind(&miss);
2727  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2728  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2729  __ Jump(ic, RelocInfo::CODE_TARGET);
2730
2731  // Return the generated code.
2732  return GetCode(NORMAL, name);
2733}
2734
2735
2736MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2737                                                      JSObject* object,
2738                                                      JSObject* last) {
2739  // ----------- S t a t e -------------
2740  //  -- a0    : receiver
2741  //  -- ra    : return address
2742  // -----------------------------------
2743  Label miss;
2744
2745  // Check that the receiver is not a smi.
2746  __ JumpIfSmi(a0, &miss);
2747
2748  // Check the maps of the full prototype chain.
2749  CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2750
2751  // If the last object in the prototype chain is a global object,
2752  // check that the global property cell is empty.
2753  if (last->IsGlobalObject()) {
2754    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2755                                                  GlobalObject::cast(last),
2756                                                  name,
2757                                                  a1,
2758                                                  &miss);
2759    if (cell->IsFailure()) {
2760      miss.Unuse();
2761      return cell;
2762    }
2763  }
2764
2765  // Return undefined if the maps of the full prototype chain are unchanged.
2766  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2767  __ Ret();
2768
2769  __ bind(&miss);
2770  GenerateLoadMiss(masm(), Code::LOAD_IC);
2771
2772  // Return the generated code.
2773  return GetCode(NONEXISTENT, heap()->empty_string());
2774}
2775
2776
2777MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2778                                                JSObject* holder,
2779                                                int index,
2780                                                String* name) {
2781  // ----------- S t a t e -------------
2782  //  -- a0    : receiver
2783  //  -- a2    : name
2784  //  -- ra    : return address
2785  // -----------------------------------
2786  Label miss;
2787
2788  __ mov(v0, a0);
2789
2790  GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2791  __ bind(&miss);
2792  GenerateLoadMiss(masm(), Code::LOAD_IC);
2793
2794  // Return the generated code.
2795  return GetCode(FIELD, name);
2796}
2797
2798
2799MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2800                                                   JSObject* object,
2801                                                   JSObject* holder,
2802                                                   AccessorInfo* callback) {
2803  // ----------- S t a t e -------------
2804  //  -- a0    : receiver
2805  //  -- a2    : name
2806  //  -- ra    : return address
2807  // -----------------------------------
2808  Label miss;
2809
2810  MaybeObject* result = GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0,
2811                                             callback, name, &miss);
2812  if (result->IsFailure()) {
2813    miss.Unuse();
2814    return result;
2815  }
2816
2817  __ bind(&miss);
2818  GenerateLoadMiss(masm(), Code::LOAD_IC);
2819
2820  // Return the generated code.
2821  return GetCode(CALLBACKS, name);
2822}
2823
2824
2825MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2826                                                   JSObject* holder,
2827                                                   Object* value,
2828                                                   String* name) {
2829  // ----------- S t a t e -------------
2830  //  -- a0    : receiver
2831  //  -- a2    : name
2832  //  -- ra    : return address
2833  // -----------------------------------
2834  Label miss;
2835
2836  GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2837  __ bind(&miss);
2838  GenerateLoadMiss(masm(), Code::LOAD_IC);
2839
2840  // Return the generated code.
2841  return GetCode(CONSTANT_FUNCTION, name);
2842}
2843
2844
2845MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
2846                                                      JSObject* holder,
2847                                                      String* name) {
2848  // ----------- S t a t e -------------
2849  //  -- a0    : receiver
2850  //  -- a2    : name
2851  //  -- ra    : return address
2852  //  -- [sp]  : receiver
2853  // -----------------------------------
2854  Label miss;
2855
2856  LookupResult lookup;
2857  LookupPostInterceptor(holder, name, &lookup);
2858  GenerateLoadInterceptor(object,
2859                          holder,
2860                          &lookup,
2861                          a0,
2862                          a2,
2863                          a3,
2864                          a1,
2865                          t0,
2866                          name,
2867                          &miss);
2868  __ bind(&miss);
2869  GenerateLoadMiss(masm(), Code::LOAD_IC);
2870
2871  // Return the generated code.
2872  return GetCode(INTERCEPTOR, name);
2873}
2874
2875
2876MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2877                                                 GlobalObject* holder,
2878                                                 JSGlobalPropertyCell* cell,
2879                                                 String* name,
2880                                                 bool is_dont_delete) {
2881  // ----------- S t a t e -------------
2882  //  -- a0    : receiver
2883  //  -- a2    : name
2884  //  -- ra    : return address
2885  // -----------------------------------
2886  Label miss;
2887
2888  // If the object is the holder then we know that it's a global
2889  // object which can only happen for contextual calls. In this case,
2890  // the receiver cannot be a smi.
2891  if (object != holder) {
2892    __ And(t0, a0, Operand(kSmiTagMask));
2893    __ Branch(&miss, eq, t0, Operand(zero_reg));
2894  }
2895
2896  // Check that the map of the global has not changed.
2897  CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2898
2899  // Get the value from the cell.
2900  __ li(a3, Operand(Handle<JSGlobalPropertyCell>(cell)));
2901  __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2902
2903  // Check for deleted property if property can actually be deleted.
2904  if (!is_dont_delete) {
2905    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2906    __ Branch(&miss, eq, t0, Operand(at));
2907  }
2908
2909  __ mov(v0, t0);
2910  Counters* counters = masm()->isolate()->counters();
2911  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2912  __ Ret();
2913
2914  __ bind(&miss);
2915  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2916  GenerateLoadMiss(masm(), Code::LOAD_IC);
2917
2918  // Return the generated code.
2919  return GetCode(NORMAL, name);
2920}
2921
2922
2923MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2924                                                     JSObject* receiver,
2925                                                     JSObject* holder,
2926                                                     int index) {
2927  // ----------- S t a t e -------------
2928  //  -- ra    : return address
2929  //  -- a0    : key
2930  //  -- a1    : receiver
2931  // -----------------------------------
2932  Label miss;
2933
2934  // Check the key is the cached one.
2935  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2936
2937  GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2938  __ bind(&miss);
2939  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2940
2941  return GetCode(FIELD, name);
2942}
2943
2944
2945MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2946    String* name,
2947    JSObject* receiver,
2948    JSObject* holder,
2949    AccessorInfo* callback) {
2950  // ----------- S t a t e -------------
2951  //  -- ra    : return address
2952  //  -- a0    : key
2953  //  -- a1    : receiver
2954  // -----------------------------------
2955  Label miss;
2956
2957  // Check the key is the cached one.
2958  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2959
2960  MaybeObject* result = GenerateLoadCallback(receiver, holder, a1, a0, a2, a3,
2961                                             t0, callback, name, &miss);
2962  if (result->IsFailure()) {
2963    miss.Unuse();
2964    return result;
2965  }
2966
2967  __ bind(&miss);
2968  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2969
2970  return GetCode(CALLBACKS, name);
2971}
2972
2973
2974MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2975                                                        JSObject* receiver,
2976                                                        JSObject* holder,
2977                                                        Object* value) {
2978  // ----------- S t a t e -------------
2979  //  -- ra    : return address
2980  //  -- a0    : key
2981  //  -- a1    : receiver
2982  // -----------------------------------
2983  Label miss;
2984
2985  // Check the key is the cached one.
2986  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
2987
2988  GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2989  __ bind(&miss);
2990  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2991
2992  // Return the generated code.
2993  return GetCode(CONSTANT_FUNCTION, name);
2994}
2995
2996
2997MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2998                                                           JSObject* holder,
2999                                                           String* name) {
3000  // ----------- S t a t e -------------
3001  //  -- ra    : return address
3002  //  -- a0    : key
3003  //  -- a1    : receiver
3004  // -----------------------------------
3005  Label miss;
3006
3007  // Check the key is the cached one.
3008  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3009
3010  LookupResult lookup;
3011  LookupPostInterceptor(holder, name, &lookup);
3012  GenerateLoadInterceptor(receiver,
3013                          holder,
3014                          &lookup,
3015                          a1,
3016                          a0,
3017                          a2,
3018                          a3,
3019                          t0,
3020                          name,
3021                          &miss);
3022  __ bind(&miss);
3023  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3024
3025  return GetCode(INTERCEPTOR, name);
3026}
3027
3028
3029MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
3030  // ----------- S t a t e -------------
3031  //  -- ra    : return address
3032  //  -- a0    : key
3033  //  -- a1    : receiver
3034  // -----------------------------------
3035  Label miss;
3036
3037  // Check the key is the cached one.
3038  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3039
3040  GenerateLoadArrayLength(masm(), a1, a2, &miss);
3041  __ bind(&miss);
3042  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3043
3044  return GetCode(CALLBACKS, name);
3045}
3046
3047
3048MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3049  // ----------- S t a t e -------------
3050  //  -- ra    : return address
3051  //  -- a0    : key
3052  //  -- a1    : receiver
3053  // -----------------------------------
3054  Label miss;
3055
3056  Counters* counters = masm()->isolate()->counters();
3057  __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3058
3059  // Check the key is the cached one.
3060  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3061
3062  GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
3063  __ bind(&miss);
3064  __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
3065
3066  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3067
3068  return GetCode(CALLBACKS, name);
3069}
3070
3071
3072MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3073  // ----------- S t a t e -------------
3074  //  -- ra    : return address
3075  //  -- a0    : key
3076  //  -- a1    : receiver
3077  // -----------------------------------
3078  Label miss;
3079
3080  Counters* counters = masm()->isolate()->counters();
3081  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3082
3083  // Check the name hasn't changed.
3084  __ Branch(&miss, ne, a0, Operand(Handle<String>(name)));
3085
3086  GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
3087  __ bind(&miss);
3088  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
3089  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3090
3091  return GetCode(CALLBACKS, name);
3092}
3093
3094
3095MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
3096  // ----------- S t a t e -------------
3097  //  -- ra    : return address
3098  //  -- a0    : key
3099  //  -- a1    : receiver
3100  // -----------------------------------
3101  Code* stub;
3102  ElementsKind elements_kind = receiver_map->elements_kind();
3103  MaybeObject* maybe_stub = KeyedLoadElementStub(elements_kind).TryGetCode();
3104  if (!maybe_stub->To(&stub)) return maybe_stub;
3105  __ DispatchMap(a1,
3106                 a2,
3107                 Handle<Map>(receiver_map),
3108                 Handle<Code>(stub),
3109                 DO_SMI_CHECK);
3110
3111  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
3112  __ Jump(ic, RelocInfo::CODE_TARGET);
3113
3114  // Return the generated code.
3115  return GetCode(NORMAL, NULL);
3116}
3117
3118
3119MaybeObject* KeyedLoadStubCompiler::CompileLoadMegamorphic(
3120    MapList* receiver_maps,
3121    CodeList* handler_ics) {
3122  // ----------- S t a t e -------------
3123  //  -- ra    : return address
3124  //  -- a0    : key
3125  //  -- a1    : receiver
3126  // -----------------------------------
3127  Label miss;
3128  __ JumpIfSmi(a1, &miss);
3129
3130  int receiver_count = receiver_maps->length();
3131  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
3132  for (int current = 0; current < receiver_count; ++current) {
3133    Handle<Map> map(receiver_maps->at(current));
3134    Handle<Code> code(handler_ics->at(current));
3135    __ Jump(code, RelocInfo::CODE_TARGET, eq, a2, Operand(map));
3136  }
3137
3138  __ bind(&miss);
3139  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
3140  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3141
3142  // Return the generated code.
3143  return GetCode(NORMAL, NULL, MEGAMORPHIC);
3144}
3145
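// Illustrative sketch only; a hypothetical helper (not used by V8) that
// models the megamorphic dispatch emitted above: the stub loads the
// receiver's map and tail-jumps to the first handler whose map matches,
// while a smi receiver or an unknown map falls through to the miss IC.
static Code* DispatchOnMapModel(Map* receiver_map,
                                MapList* receiver_maps,
                                CodeList* handler_ics,
                                Code* miss_ic) {
  for (int current = 0; current < receiver_maps->length(); ++current) {
    if (receiver_maps->at(current) == receiver_map) {
      return handler_ics->at(current);  // Matching map: use this handler.
    }
  }
  return miss_ic;  // No map matched (megamorphic miss).
}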
3146
3147MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
3148                                                       int index,
3149                                                       Map* transition,
3150                                                       String* name) {
3151  // ----------- S t a t e -------------
3152  //  -- a0    : value
3153  //  -- a1    : key
3154  //  -- a2    : receiver
3155  //  -- ra    : return address
3156  // -----------------------------------
3157
3158  Label miss;
3159
3160  Counters* counters = masm()->isolate()->counters();
3161  __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3162
3163  // Check that the name has not changed.
3164  __ Branch(&miss, ne, a1, Operand(Handle<String>(name)));
3165
3166  // a3 is used as a scratch register. a1 and a2 keep their values if a jump
3167  // to the miss label is generated.
3168  GenerateStoreField(masm(),
3169                     object,
3170                     index,
3171                     transition,
3172                     a2, a1, a3,
3173                     &miss);
3174  __ bind(&miss);
3175
3176  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3177  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3178  __ Jump(ic, RelocInfo::CODE_TARGET);
3179
3180  // Return the generated code.
3181  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
3182}
3183
3184
3185MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
3186  // ----------- S t a t e -------------
3187  //  -- a0    : value
3188  //  -- a1    : key
3189  //  -- a2    : receiver
3190  //  -- ra    : return address
3191  //  -- a3    : scratch
3192  // -----------------------------------
3193  Code* stub;
3194  ElementsKind elements_kind = receiver_map->elements_kind();
3195  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3196  MaybeObject* maybe_stub =
3197      KeyedStoreElementStub(is_js_array, elements_kind).TryGetCode();
3198  if (!maybe_stub->To(&stub)) return maybe_stub;
3199  __ DispatchMap(a2,
3200                 a3,
3201                 Handle<Map>(receiver_map),
3202                 Handle<Code>(stub),
3203                 DO_SMI_CHECK);
3204
3205  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3206  __ Jump(ic, RelocInfo::CODE_TARGET);
3207
3208  // Return the generated code.
3209  return GetCode(NORMAL, NULL);
3210}
3211
3212
3213MaybeObject* KeyedStoreStubCompiler::CompileStoreMegamorphic(
3214    MapList* receiver_maps,
3215    CodeList* handler_ics) {
3216  // ----------- S t a t e -------------
3217  //  -- a0    : value
3218  //  -- a1    : key
3219  //  -- a2    : receiver
3220  //  -- ra    : return address
3221  //  -- a3    : scratch
3222  // -----------------------------------
3223  Label miss;
3224  __ JumpIfSmi(a2, &miss);
3225
3226  int receiver_count = receiver_maps->length();
3227  __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
3228  for (int current = 0; current < receiver_count; ++current) {
3229    Handle<Map> map(receiver_maps->at(current));
3230    Handle<Code> code(handler_ics->at(current));
3231    __ Jump(code, RelocInfo::CODE_TARGET, eq, a3, Operand(map));
3232  }
3233
3234  __ bind(&miss);
3235  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3236  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3237
3238  // Return the generated code.
3239  return GetCode(NORMAL, NULL, MEGAMORPHIC);
3240}
3241
3242
3243MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3244  // a0    : argc
3245  // a1    : constructor
3246  // ra    : return address
3247  // [sp]  : last argument
3248  Label generic_stub_call;
3249
3250  // Use t7 for holding undefined which is used in several places below.
3251  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3252
3253#ifdef ENABLE_DEBUGGER_SUPPORT
3254  // Check to see whether there are any break points in the function code. If
3255  // there are, jump to the generic constructor stub, which calls the actual
3256  // code for the function and thereby hits the break points.
3257  __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3258  __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3259  __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3260#endif
3261
3262  // Load the initial map and verify that it is in fact a map.
3263  // a1: constructor function
3264  // t7: undefined
3265  __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3266  __ And(t0, a2, Operand(kSmiTagMask));
3267  __ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
3268  __ GetObjectType(a2, a3, t0);
3269  __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3270
3271#ifdef DEBUG
3272  // Cannot construct functions this way.
3273  // a0: argc
3274  // a1: constructor function
3275  // a2: initial map
3276  // t7: undefined
3277  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3278  __ Check(ne, "Function constructed by construct stub.",
3279      a3, Operand(JS_FUNCTION_TYPE));
3280#endif
3281
3282  // Now allocate the JSObject in new space.
3283  // a0: argc
3284  // a1: constructor function
3285  // a2: initial map
3286  // t7: undefined
3287  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3288  __ AllocateInNewSpace(a3,
3289                        t4,
3290                        t5,
3291                        t6,
3292                        &generic_stub_call,
3293                        SIZE_IN_WORDS);
3294
3295  // The JSObject is now allocated; initialize the fields. The map is set to
3296  // the initial map, and properties and elements to the empty fixed array.
3297  // a0: argc
3298  // a1: constructor function
3299  // a2: initial map
3300  // a3: object size (in words)
3301  // t4: JSObject (not tagged)
3302  // t7: undefined
3303  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3304  __ mov(t5, t4);
3305  __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3306  __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3307  __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3308  __ Addu(t5, t5, Operand(3 * kPointerSize));
3309  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3310  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3311  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3312
3314  // Calculate the location of the first argument. The stack contains only the
3315  // argc arguments.
3316  __ sll(a1, a0, kPointerSizeLog2);
3317  __ Addu(a1, a1, sp);
3318
3319  // Fill all the in-object properties with undefined.
3320  // a0: argc
3321  // a1: first argument
3322  // a3: object size (in words)
3323  // t4: JSObject (not tagged)
3324  // t5: First in-object property of JSObject (not tagged)
3325  // t7: undefined
3326  // Fill the initialized properties with a constant value or a passed argument
3327  // depending on the this.x = ...; assignment in the function.
3328  SharedFunctionInfo* shared = function->shared();
3329  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3330    if (shared->IsThisPropertyAssignmentArgument(i)) {
3331      Label not_passed, next;
3332      // Check if the argument assigned to the property is actually passed.
3333      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3334      __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3335      // Argument passed - find it on the stack.
3336      __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3337      __ sw(a2, MemOperand(t5));
3338      __ Addu(t5, t5, kPointerSize);
3339      __ jmp(&next);
3340      __ bind(&not_passed);
3341      // Set the property to undefined.
3342      __ sw(t7, MemOperand(t5));
3343      __ Addu(t5, t5, Operand(kPointerSize));
3344      __ bind(&next);
3345    } else {
3346      // Set the property to the constant value.
3347      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3348      __ li(a2, Operand(constant));
3349      __ sw(a2, MemOperand(t5));
3350      __ Addu(t5, t5, kPointerSize);
3351    }
3352  }
3353
3354  // Fill the unused in-object property fields with undefined.
3355  ASSERT(function->has_initial_map());
3356  for (int i = shared->this_property_assignments_count();
3357       i < function->initial_map()->inobject_properties();
3358       i++) {
3359    __ sw(t7, MemOperand(t5));
3360    __ Addu(t5, t5, kPointerSize);
3361  }
3362
3363  // a0: argc
3364  // t4: JSObject (not tagged)
3365  // Move argc to a1, move the JSObject to v0 as the return value, and tag it.
3366  __ mov(a1, a0);
3367  __ mov(v0, t4);
3368  __ Or(v0, v0, Operand(kHeapObjectTag));
3369
3370  // v0: JSObject
3371  // a1: argc
3372  // Remove caller arguments and receiver from the stack and return.
3373  __ sll(t0, a1, kPointerSizeLog2);
3374  __ Addu(sp, sp, t0);
3375  __ Addu(sp, sp, Operand(kPointerSize));
3376  Counters* counters = masm()->isolate()->counters();
3377  __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3378  __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3379  __ Ret();
3380
3381  // Jump to the generic stub in case the specialized code cannot handle the
3382  // construction.
3383  __ bind(&generic_stub_call);
3384  Handle<Code> generic_construct_stub =
3385      masm()->isolate()->builtins()->JSConstructStubGeneric();
3386  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3387
3388  // Return the generated code.
3389  return GetCode();
3390}
3391
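// Illustrative sketch only; a hypothetical helper (not part of V8) that
// models the stack-slot arithmetic of the construct stub above. The stack
// holds exactly argc arguments with the last argument at [sp], so the stub
// forms base = sp + argc * kPointerSize and reads argument n at
// base - (n + 1) * kPointerSize, i.e. at this offset from sp:
static int ConstructArgumentOffsetModel(int argc, int arg_number) {
  // Matches lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize))
  // above, with a1 = sp + argc * kPointerSize.
  return (argc - 1 - arg_number) * kPointerSize;
}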
3392
3393#undef __
3394#define __ ACCESS_MASM(masm)
3395
3396
3397void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3398    MacroAssembler* masm) {
3399  // ---------- S t a t e --------------
3400  //  -- ra     : return address
3401  //  -- a0     : key
3402  //  -- a1     : receiver
3403  // -----------------------------------
3404  Label slow, miss_force_generic;
3405
3406  Register key = a0;
3407  Register receiver = a1;
3408
3409  __ JumpIfNotSmi(key, &miss_force_generic);
3410  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
3411  __ sra(a2, a0, kSmiTagSize);
3412  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
3413  __ Ret();
3414
3415  // Slow case, key and receiver still in a0 and a1.
3416  __ bind(&slow);
3417  __ IncrementCounter(
3418      masm->isolate()->counters()->keyed_load_external_array_slow(),
3419      1, a2, a3);
3420  // Entry registers are intact.
3421  // ---------- S t a t e --------------
3422  //  -- ra     : return address
3423  //  -- a0     : key
3424  //  -- a1     : receiver
3425  // -----------------------------------
3426  Handle<Code> slow_ic =
3427      masm->isolate()->builtins()->KeyedLoadIC_Slow();
3428  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3429
3430  // Miss case, call the runtime.
3431  __ bind(&miss_force_generic);
3432
3433  // ---------- S t a t e --------------
3434  //  -- ra     : return address
3435  //  -- a0     : key
3436  //  -- a1     : receiver
3437  // -----------------------------------
3438
3439  Handle<Code> miss_ic =
3440      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3441  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3442}
3443
3444
3445static bool IsElementTypeSigned(ElementsKind elements_kind) {
3446  switch (elements_kind) {
3447    case EXTERNAL_BYTE_ELEMENTS:
3448    case EXTERNAL_SHORT_ELEMENTS:
3449    case EXTERNAL_INT_ELEMENTS:
3450      return true;
3451
3452    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3453    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3454    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3455    case EXTERNAL_PIXEL_ELEMENTS:
3456      return false;
3457
3458    case EXTERNAL_FLOAT_ELEMENTS:
3459    case EXTERNAL_DOUBLE_ELEMENTS:
3460    case FAST_ELEMENTS:
3461    case FAST_DOUBLE_ELEMENTS:
3462    case DICTIONARY_ELEMENTS:
3463    case NON_STRICT_ARGUMENTS_ELEMENTS:
3464      UNREACHABLE();
3465      return false;
3466  }
3467  return false;
3468}
3469
3470
3471void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3472    MacroAssembler* masm,
3473    ElementsKind elements_kind) {
3474  // ---------- S t a t e --------------
3475  //  -- ra     : return address
3476  //  -- a0     : key
3477  //  -- a1     : receiver
3478  // -----------------------------------
3479  Label miss_force_generic, slow, failed_allocation;
3480
3481  Register key = a0;
3482  Register receiver = a1;
3483
3484  // This stub is meant to be tail-jumped to; the receiver must already
3485  // have been verified by the caller not to be a smi.
3486
3487  // Check that the key is a smi.
3488  __ JumpIfNotSmi(key, &miss_force_generic);
3489
3490  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3491  // a3: elements array
3492
3493  // Check that the index is in range.
3494  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3495  __ sra(t2, key, kSmiTagSize);
3496  // Unsigned comparison catches both negative and too-large values.
3497  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3498
3499  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3500  // a3: base pointer of external storage
3501
3502  // We do not untag the smi key; instead we work with it as if it were
3503  // premultiplied by 2 (the example below spells out the byte offsets).
3504  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
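  // Example: element index 3 is held as the smi 6; for 2-byte elements the
  // key is already the byte offset (6), for 1-byte elements the offset is
  // key >> 1 == 3, and for 4-byte elements it is key << 1 == 12.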
3505
3506  Register value = a2;
3507  switch (elements_kind) {
3508    case EXTERNAL_BYTE_ELEMENTS:
3509      __ srl(t2, key, 1);
3510      __ addu(t3, a3, t2);
3511      __ lb(value, MemOperand(t3, 0));
3512      break;
3513    case EXTERNAL_PIXEL_ELEMENTS:
3514    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3515      __ srl(t2, key, 1);
3516      __ addu(t3, a3, t2);
3517      __ lbu(value, MemOperand(t3, 0));
3518      break;
3519    case EXTERNAL_SHORT_ELEMENTS:
3520      __ addu(t3, a3, key);
3521      __ lh(value, MemOperand(t3, 0));
3522      break;
3523    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3524      __ addu(t3, a3, key);
3525      __ lhu(value, MemOperand(t3, 0));
3526      break;
3527    case EXTERNAL_INT_ELEMENTS:
3528    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3529      __ sll(t2, key, 1);
3530      __ addu(t3, a3, t2);
3531      __ lw(value, MemOperand(t3, 0));
3532      break;
3533    case EXTERNAL_FLOAT_ELEMENTS:
3534      __ sll(t3, t2, 2);
3535      __ addu(t3, a3, t3);
3536      if (CpuFeatures::IsSupported(FPU)) {
3537        CpuFeatures::Scope scope(FPU);
3538        __ lwc1(f0, MemOperand(t3, 0));
3539      } else {
3540        __ lw(value, MemOperand(t3, 0));
3541      }
3542      break;
3543    case EXTERNAL_DOUBLE_ELEMENTS:
3544      __ sll(t2, key, 2);
3545      __ addu(t3, a3, t2);
3546      if (CpuFeatures::IsSupported(FPU)) {
3547        CpuFeatures::Scope scope(FPU);
3548        __ ldc1(f0, MemOperand(t3, 0));
3549      } else {
3550        // t3: pointer to the beginning of the double we want to load.
3551        __ lw(a2, MemOperand(t3, 0));
3552        __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3553      }
3554      break;
3555    case FAST_ELEMENTS:
3556    case FAST_DOUBLE_ELEMENTS:
3557    case DICTIONARY_ELEMENTS:
3558    case NON_STRICT_ARGUMENTS_ELEMENTS:
3559      UNREACHABLE();
3560      break;
3561  }
3562
3563  // For integer array types:
3564  // a2: value
3565  // For float array type:
3566  // f0: value (if FPU is supported)
3567  // a2: value (if FPU is not supported)
3568  // For double array type:
3569  // f0: value (if FPU is supported)
3570  // a2/a3: value (if FPU is not supported)
3571
3572  if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3573    // For the Int and UnsignedInt array types, we need to see whether
3574    // the value can be represented in a Smi. If not, we need to convert
3575    // it to a HeapNumber.
3576    Label box_int;
3577    __ Subu(t3, value, Operand(0xC0000000));  // Non-smi value gives neg result.
3578    __ Branch(&box_int, lt, t3, Operand(zero_reg));
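    // Why 0xC0000000 works: a smi payload is a signed 31-bit value in
    // [-0x40000000, 0x3FFFFFFF]. Subtracting 0xC0000000 equals adding
    // 0x40000000 modulo 2^32, which maps exactly that range onto the
    // non-negative 32-bit integers; everything else wraps negative.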
3579    // Tag integer as smi and return it.
3580    __ sll(v0, value, kSmiTagSize);
3581    __ Ret();
3582
3583    __ bind(&box_int);
3584    // Allocate a HeapNumber for the result and perform int-to-double
3585    // conversion.
3586    // The arm version uses a temporary here to save r0, but we don't need to
3587    // (a0 is not modified).
3588    __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3589    __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3590
3591    if (CpuFeatures::IsSupported(FPU)) {
3592      CpuFeatures::Scope scope(FPU);
3593      __ mtc1(value, f0);
3594      __ cvt_d_w(f0, f0);
3595      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3596      __ Ret();
3597    } else {
3598      Register dst1 = t2;
3599      Register dst2 = t3;
3600      FloatingPointHelper::Destination dest =
3601          FloatingPointHelper::kCoreRegisters;
3602      FloatingPointHelper::ConvertIntToDouble(masm,
3603                                              value,
3604                                              dest,
3605                                              f0,
3606                                              dst1,
3607                                              dst2,
3608                                              t1,
3609                                              f2);
3610      __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3611      __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3612      __ Ret();
3613    }
3614  } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3615    // The test is different for unsigned int values. Since we need
3616    // the value to be in the range of a positive smi, we can't
3617    // handle either of the top two bits being set in the value.
3618    if (CpuFeatures::IsSupported(FPU)) {
3619      CpuFeatures::Scope scope(FPU);
3620      Label pl_box_int;
3621      __ And(t2, value, Operand(0xC0000000));
3622      __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
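      // Example: 0x3FFFFFFF still fits a positive smi, but 0x40000000
      // (bit 30 set) or anything larger must be boxed as a HeapNumber.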
3623
3624      // It can fit in a Smi.
3625      // Tag integer as smi and return it.
3626      __ sll(v0, value, kSmiTagSize);
3627      __ Ret();
3628
3629      __ bind(&pl_box_int);
3630      // Allocate a HeapNumber for the result and perform int-to-double
3631      // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3632      // registers - also when jumping due to exhausted young space.
3633      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3634      __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3635
3636      // The following sequence is replaced by the Cvt_d_uw macro:
3637      // __ mtc1(value, f0);     // LS 32-bits.
3638      // __ mtc1(zero_reg, f1);  // MS 32-bits are all zero.
3639      // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3640
3641      __ Cvt_d_uw(f0, value, f22);
3642
3643      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3644
3645      __ Ret();
3646    } else {
3647      // Check whether unsigned integer fits into smi.
3648      Label box_int_0, box_int_1, done;
3649      __ And(t2, value, Operand(0x80000000));
3650      __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3651      __ And(t2, value, Operand(0x40000000));
3652      __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3653
3654      // Tag integer as smi and return it.
3655      __ sll(v0, value, kSmiTagSize);
3656      __ Ret();
3657
3658      Register hiword = value;  // a2.
3659      Register loword = a3;
3660
3661      __ bind(&box_int_0);
3662      // Integer does not have leading zeros.
3663      GenerateUInt2Double(masm, hiword, loword, t0, 0);
3664      __ Branch(&done);
3665
3666      __ bind(&box_int_1);
3667      // Integer has one leading zero.
3668      GenerateUInt2Double(masm, hiword, loword, t0, 1);
3669
3671      __ bind(&done);
3672      // Integer was converted to double in registers hiword:loword.
3673      // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3674      // clobbers all registers - also when jumping due to exhausted young
3675      // space.
3676      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3677      __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3678
3679      __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3680      __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3681
3682      __ mov(v0, t2);
3683      __ Ret();
3684    }
3685  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3686    // For the floating-point array type, we always need to allocate a
3687    // HeapNumber.
3688    if (CpuFeatures::IsSupported(FPU)) {
3689      CpuFeatures::Scope scope(FPU);
3690      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3691      // AllocateHeapNumber clobbers all registers - also when jumping due to
3692      // exhausted young space.
3693      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3694      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3695      // The single-precision float value is already in FPU register f0.
3696      __ cvt_d_s(f0, f0);
3697      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3698      __ Ret();
3699    } else {
3700      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3701      // AllocateHeapNumber clobbers all registers - also when jumping due to
3702      // exhausted young space.
3703      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3704      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3705      // FPU is not available; convert manually (see sketch after this function).
3706
3707      // a2: floating point value (binary32).
3708      // v0: heap number for result
3709
3710      // Extract mantissa to t4.
3711      __ And(t4, value, Operand(kBinary32MantissaMask));
3712
3713      // Extract exponent to t5.
3714      __ srl(t5, value, kBinary32MantissaBits);
3715      __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3716
3717      Label exponent_rebiased;
3718      __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3719
3720      __ li(t0, 0x7ff);
3721      __ Xor(t1, t5, Operand(0xFF));
3722      __ movz(t5, t0, t1);  // Set t5 to 0x7ff only if t5 is equal to 0xff.
3723      __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));  // Was 0xff.
3724
3725      // Rebias exponent.
3726      __ Addu(t5,
3727              t5,
3728              Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3729
3730      __ bind(&exponent_rebiased);
3731      __ And(a2, value, Operand(kBinary32SignMask));
3732      value = no_reg;
3733      __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3734      __ or_(a2, a2, t0);
3735
3736      // Shift mantissa.
3737      static const int kMantissaShiftForHiWord =
3738          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3739
3740      static const int kMantissaShiftForLoWord =
3741          kBitsPerInt - kMantissaShiftForHiWord;
3742
3743      __ srl(t0, t4, kMantissaShiftForHiWord);
3744      __ or_(a2, a2, t0);
3745      __ sll(a0, t4, kMantissaShiftForLoWord);
3746
3747      __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3748      __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3749      __ Ret();
3750    }
3751
3752  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3753    if (CpuFeatures::IsSupported(FPU)) {
3754      CpuFeatures::Scope scope(FPU);
3755      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3756      // AllocateHeapNumber clobbers all registers - also when jumping due to
3757      // exhausted young space.
3758      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3759      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3760      // The double value is already in f0
3761      __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3762      __ Ret();
3763    } else {
3764      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3765      // AllocateHeapNumber clobbers all registers - also when jumping due to
3766      // exhausted young space.
3767      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3768      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3769
3770      __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3771      __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3772      __ Ret();
3773    }
3774
3775  } else {
3776    // Tag integer as smi and return it.
3777    __ sll(v0, value, kSmiTagSize);
3778    __ Ret();
3779  }
3780
3781  // Slow case, key and receiver still in a0 and a1.
3782  __ bind(&slow);
3783  __ IncrementCounter(
3784      masm->isolate()->counters()->keyed_load_external_array_slow(),
3785      1, a2, a3);
3786
3787  // ---------- S t a t e --------------
3788  //  -- ra     : return address
3789  //  -- a0     : key
3790  //  -- a1     : receiver
3791  // -----------------------------------
3792
3793  __ Push(a1, a0);
3794
3795  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3796
3797  __ bind(&miss_force_generic);
3798  Code* stub = masm->isolate()->builtins()->builtin(
3799      Builtins::kKeyedLoadIC_MissForceGeneric);
3800  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
3801}
3802
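// Illustrative sketch only; a hypothetical helper (not part of V8) that
// models the manual binary32 -> binary64 conversion in the non-FPU load
// path above: copy the sign, shift the mantissa from 23 into 52 bits, and
// rebias the exponent from 127 to 1023, mapping the two special exponents
// (0 and 0xff) directly to their binary64 counterparts.
static uint64_t Binary32ToBinary64BitsModel(uint32_t bits) {
  uint64_t sign = static_cast<uint64_t>(bits & 0x80000000u) << 32;
  uint32_t exponent = (bits >> 23) & 0xFF;
  uint64_t mantissa = static_cast<uint64_t>(bits & 0x007FFFFFu) << 29;
  uint64_t exponent64;
  if (exponent == 0) {
    exponent64 = 0;  // Zeros (and denormals) keep a zero exponent.
  } else if (exponent == 0xFF) {
    exponent64 = 0x7FF;  // Infinities and NaNs.
  } else {
    exponent64 = exponent - 127 + 1023;  // Rebias.
  }
  return sign | (exponent64 << 52) | mantissa;
}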
3803
3804void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3805    MacroAssembler* masm,
3806    ElementsKind elements_kind) {
3807  // ---------- S t a t e --------------
3808  //  -- a0     : value
3809  //  -- a1     : key
3810  //  -- a2     : receiver
3811  //  -- ra     : return address
3812  // -----------------------------------
3813
3814  Label slow, check_heap_number, miss_force_generic;
3815
3816  // Register usage.
3817  Register value = a0;
3818  Register key = a1;
3819  Register receiver = a2;
3820  // a3 mostly holds the elements array or the destination external array.
3821
3822  // This stub is meant to be tail-jumped to; the receiver must already
3823  // have been verified by the caller not to be a smi.
3824
3825  // Check that the key is a smi.
3826  __ JumpIfNotSmi(key, &miss_force_generic);
3827
3828  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3829
3830  // Check that the index is in range.
3831  __ SmiUntag(t0, key);
3832  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3833  // Unsigned comparison catches both negative and too-large values.
3834  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3835
3836  // Handle both smis and HeapNumbers in the fast path. Go to the
3837  // runtime for all other kinds of values.
3838  // a3: external array.
3839  // t0: key (integer).
3840
3841  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3842    // Double to pixel conversion is only implemented in the runtime for now.
3843    __ JumpIfNotSmi(value, &slow);
3844  } else {
3845    __ JumpIfNotSmi(value, &check_heap_number);
3846  }
3847  __ SmiUntag(t1, value);
3848  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3849
3850  // a3: base pointer of external storage.
3851  // t0: key (integer).
3852  // t1: value (integer).
3853
3854  switch (elements_kind) {
3855    case EXTERNAL_PIXEL_ELEMENTS: {
3856      // Clamp the value to [0..255].
3857      // v0 is used as a scratch register here.
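      // Worked example of the branches below: t1 = 300 takes the first
      // branch with v0 already 255; t1 = -5 takes the second branch and
      // its delay slot sets v0 = 0; t1 = 100 falls through to v0 = t1.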
3858      Label done;
3859      __ li(v0, Operand(255));
3860      // Normal branch: nop in delay slot.
3861      __ Branch(&done, gt, t1, Operand(v0));
3862      // Use delay slot in this branch.
3863      __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3864      __ mov(v0, zero_reg);  // In delay slot.
3865      __ mov(v0, t1);  // Value is in range 0..255.
3866      __ bind(&done);
3867      __ mov(t1, v0);
3868      __ addu(t8, a3, t0);
3869      __ sb(t1, MemOperand(t8, 0));
3870      }
3871      break;
3872    case EXTERNAL_BYTE_ELEMENTS:
3873    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3874      __ addu(t8, a3, t0);
3875      __ sb(t1, MemOperand(t8, 0));
3876      break;
3877    case EXTERNAL_SHORT_ELEMENTS:
3878    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3879      __ sll(t8, t0, 1);
3880      __ addu(t8, a3, t8);
3881      __ sh(t1, MemOperand(t8, 0));
3882      break;
3883    case EXTERNAL_INT_ELEMENTS:
3884    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3885      __ sll(t8, t0, 2);
3886      __ addu(t8, a3, t8);
3887      __ sw(t1, MemOperand(t8, 0));
3888      break;
3889    case EXTERNAL_FLOAT_ELEMENTS:
3890      // Perform int-to-float conversion and store to memory.
3891      StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
3892      break;
3893    case EXTERNAL_DOUBLE_ELEMENTS:
3894      __ sll(t8, t0, 3);
3895      __ addu(a3, a3, t8);
3896      // a3: effective address of the double element
3897      FloatingPointHelper::Destination destination;
3898      if (CpuFeatures::IsSupported(FPU)) {
3899        destination = FloatingPointHelper::kFPURegisters;
3900      } else {
3901        destination = FloatingPointHelper::kCoreRegisters;
3902      }
3903      FloatingPointHelper::ConvertIntToDouble(
3904          masm, t1, destination,
3905          f0, t2, t3,  // These are: double_dst, dst1, dst2.
3906          t0, f2);  // These are: scratch2, single_scratch.
3907      if (destination == FloatingPointHelper::kFPURegisters) {
3908        CpuFeatures::Scope scope(FPU);
3909        __ sdc1(f0, MemOperand(a3, 0));
3910      } else {
3911        __ sw(t2, MemOperand(a3, 0));
3912        __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3913      }
3914      break;
3915    case FAST_ELEMENTS:
3916    case FAST_DOUBLE_ELEMENTS:
3917    case DICTIONARY_ELEMENTS:
3918    case NON_STRICT_ARGUMENTS_ELEMENTS:
3919      UNREACHABLE();
3920      break;
3921  }
3922
3923  // Entry registers are intact, a0 holds the value which is the return value.
3924  __ mov(v0, value);
3925  __ Ret();
3926
3927  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3928    // a3: external array.
3929    // t0: index (integer).
3930    __ bind(&check_heap_number);
3931    __ GetObjectType(value, t1, t2);
3932    __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3933
3934    __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3935
3936    // a3: base pointer of external storage.
3937    // t0: key (integer).
3938
3939    // The WebGL specification leaves the behavior of storing NaN and
3940    // +/-Infinity into integer arrays basically undefined. For more
3941    // reproducible behavior, convert these to zero.
3942
3943    if (CpuFeatures::IsSupported(FPU)) {
3944      CpuFeatures::Scope scope(FPU);
3945
3946      __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3947
3948      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3949        __ cvt_s_d(f0, f0);
3950        __ sll(t8, t0, 2);
3951        __ addu(t8, a3, t8);
3952        __ swc1(f0, MemOperand(t8, 0));
3953      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3954        __ sll(t8, t0, 3);
3955        __ addu(t8, a3, t8);
3956        __ sdc1(f0, MemOperand(t8, 0));
3957      } else {
3958        __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);
3959
3960        switch (elements_kind) {
3961          case EXTERNAL_BYTE_ELEMENTS:
3962          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3963            __ addu(t8, a3, t0);
3964            __ sb(t3, MemOperand(t8, 0));
3965            break;
3966          case EXTERNAL_SHORT_ELEMENTS:
3967          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3968            __ sll(t8, t0, 1);
3969            __ addu(t8, a3, t8);
3970            __ sh(t3, MemOperand(t8, 0));
3971            break;
3972          case EXTERNAL_INT_ELEMENTS:
3973          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3974            __ sll(t8, t0, 2);
3975            __ addu(t8, a3, t8);
3976            __ sw(t3, MemOperand(t8, 0));
3977            break;
3978          case EXTERNAL_PIXEL_ELEMENTS:
3979          case EXTERNAL_FLOAT_ELEMENTS:
3980          case EXTERNAL_DOUBLE_ELEMENTS:
3981          case FAST_ELEMENTS:
3982          case FAST_DOUBLE_ELEMENTS:
3983          case DICTIONARY_ELEMENTS:
3984          case NON_STRICT_ARGUMENTS_ELEMENTS:
3985            UNREACHABLE();
3986            break;
3987        }
3988      }
3989
3990      // Entry registers are intact, a0 holds the value
3991      // which is the return value.
3992      __ mov(v0, value);
3993      __ Ret();
3994    } else {
3995      // FPU is not available, do manual conversions.
3996
3997      __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3998      __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3999
4000      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
4001        Label done, nan_or_infinity_or_zero;
4002        static const int kMantissaInHiWordShift =
4003            kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
4004
4005        static const int kMantissaInLoWordShift =
4006            kBitsPerInt - kMantissaInHiWordShift;
4007
4008        // Test for all special exponent values: zeros, subnormal numbers, NaNs
4009        // and infinities. All these should be converted to 0.
4010        __ li(t5, HeapNumber::kExponentMask);
4011        __ and_(t6, t3, t5);
4012        __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
4013
4014        __ xor_(t1, t6, t5);
4015        __ li(t2, kBinary32ExponentMask);
4016        __ movz(t6, t2, t1);  // Only if t6 is equal to t5.
4017        __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
4018
4019        // Rebias exponent.
4020        __ srl(t6, t6, HeapNumber::kExponentShift);
4021        __ Addu(t6,
4022                t6,
4023                Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
4024
4025        __ li(t1, Operand(kBinary32MaxExponent));
4026        __ Slt(t1, t1, t6);
4027        __ And(t2, t3, Operand(HeapNumber::kSignMask));
4028        __ Or(t2, t2, Operand(kBinary32ExponentMask));
4029        __ movn(t3, t2, t1);  // Only if t6 is gt kBinary32MaxExponent.
4030        __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
4031
4032        __ Slt(t1, t6, Operand(kBinary32MinExponent));
4033        __ And(t2, t3, Operand(HeapNumber::kSignMask));
4034        __ movn(t3, t2, t1);  // Only if t6 is lt kBinary32MinExponent.
4035        __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
4036
4037        __ And(t7, t3, Operand(HeapNumber::kSignMask));
4038        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4039        __ sll(t3, t3, kMantissaInHiWordShift);
4040        __ or_(t7, t7, t3);
4041        __ srl(t4, t4, kMantissaInLoWordShift);
4042        __ or_(t7, t7, t4);
4043        __ sll(t6, t6, kBinary32ExponentShift);
4044        __ or_(t3, t7, t6);
4045
4046        __ bind(&done);
4047        __ sll(t9, t0, 2);  // t0 holds the untagged key.
4048        __ addu(t9, a3, t9);  // a3 is the external storage base pointer.
4049        __ sw(t3, MemOperand(t9, 0));
4050
4051        // Entry registers are intact, a0 holds the value which is the return
4052        // value.
4053        __ mov(v0, value);
4054        __ Ret();
4055
4056        __ bind(&nan_or_infinity_or_zero);
4057        __ And(t7, t3, Operand(HeapNumber::kSignMask));
4058        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4059        __ or_(t6, t6, t7);
4060        __ sll(t3, t3, kMantissaInHiWordShift);
4061        __ or_(t6, t6, t3);
4062        __ srl(t4, t4, kMantissaInLoWordShift);
4063        __ or_(t3, t6, t4);
4064        __ Branch(&done);
4065      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4066        __ sll(t8, t0, 3);
4067        __ addu(t8, a3, t8);
4068        // t8: effective address of destination element.
4069        __ sw(t4, MemOperand(t8, 0));
4070        __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
4071        __ Ret();
4072      } else {
4073        bool is_signed_type = IsElementTypeSigned(elements_kind);
4074        int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
4075        int32_t min_value = is_signed_type ? 0x80000000 : 0x00000000;
4076
4077        Label done, sign;
4078
4079        // Test for all special exponent values: zeros, subnormal numbers, NaNs
4080        // and infinities. All these should be converted to 0.
4081        __ li(t5, HeapNumber::kExponentMask);
4082        __ and_(t6, t3, t5);
4083        __ movz(t3, zero_reg, t6);  // Only if t6 is equal to zero.
4084        __ Branch(&done, eq, t6, Operand(zero_reg));
4085
4086        __ xor_(t2, t6, t5);
4087        __ movz(t3, zero_reg, t2);  // Only if t6 is equal to t5.
4088        __ Branch(&done, eq, t6, Operand(t5));
4089
4090        // Unbias exponent.
4091        __ srl(t6, t6, HeapNumber::kExponentShift);
4092        __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
4093        // If exponent is negative then result is 0.
4094        __ slt(t2, t6, zero_reg);
4095        __ movn(t3, zero_reg, t2);  // Only if exponent is negative.
4096        __ Branch(&done, lt, t6, Operand(zero_reg));
4097
4098        // If exponent is too big then result is minimal value.
4099        __ slti(t1, t6, meaningful_bits - 1);
4100        __ li(t2, min_value);
4101        __ movz(t3, t2, t1);  // Only if t6 is ge meaningful_bits - 1.
4102        __ Branch(&done, ge, t6, Operand(meaningful_bits - 1));
4103
4104        __ And(t5, t3, Operand(HeapNumber::kSignMask));
4105        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
4106        __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
4107
4108        __ li(t9, HeapNumber::kMantissaBitsInTopWord);
4109        __ subu(t6, t9, t6);
4110        __ slt(t1, t6, zero_reg);
4111        __ srlv(t2, t3, t6);
4112        __ movz(t3, t2, t1);  // Only if t6 is non-negative.
4113        __ Branch(&sign, ge, t6, Operand(zero_reg));
4114
4115        __ subu(t6, zero_reg, t6);
4116        __ sllv(t3, t3, t6);
4117        __ li(t9, kBitsPerInt);  // The low word contributes 32 - shift bits.
4118        __ subu(t6, t9, t6);
4119        __ srlv(t4, t4, t6);
4120        __ or_(t3, t3, t4);
4121
4122        __ bind(&sign);
4123        __ subu(t2, zero_reg, t3);  // Negate the magnitude.
4124        __ movn(t3, t2, t5);  // Only if the sign bit (t5) is set.
4125
4126        __ bind(&done);
4127
4128        // Result is in t3.
4129        // This switch block should be exactly the same as above (FPU mode).
4130        switch (elements_kind) {
4131          case EXTERNAL_BYTE_ELEMENTS:
4132          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4133            __ addu(t8, a3, t0);
4134            __ sb(t3, MemOperand(t8, 0));
4135            break;
4136          case EXTERNAL_SHORT_ELEMENTS:
4137          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
4138            __ sll(t8, t0, 1);
4139            __ addu(t8, a3, t8);
4140            __ sh(t3, MemOperand(t8, 0));
4141            break;
4142          case EXTERNAL_INT_ELEMENTS:
4143          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
4144            __ sll(t8, t0, 2);
4145            __ addu(t8, a3, t8);
4146            __ sw(t3, MemOperand(t8, 0));
4147            break;
4148          case EXTERNAL_PIXEL_ELEMENTS:
4149          case EXTERNAL_FLOAT_ELEMENTS:
4150          case EXTERNAL_DOUBLE_ELEMENTS:
4151          case FAST_ELEMENTS:
4152          case FAST_DOUBLE_ELEMENTS:
4153          case DICTIONARY_ELEMENTS:
4154          case NON_STRICT_ARGUMENTS_ELEMENTS:
4155            UNREACHABLE();
4156            break;
4157        }
4158      }
4159    }
4160  }
4161
4162  // Slow case: value, key and receiver are still in a0, a1 and a2.
4163  __ bind(&slow);
4164  __ IncrementCounter(
4165      masm->isolate()->counters()->keyed_load_external_array_slow(),
4166      1, t0, t1);  // Scratch registers; a2 (the receiver) must stay intact.
4167  // Entry registers are intact.
4168  // ---------- S t a t e --------------
4169  //  -- ra     : return address
4170  //  -- a0     : value
4171  //  -- a1     : key
4172  //  -- a2     : receiver
4173  // -----------------------------------
4173  Handle<Code> slow_ic =
4174      masm->isolate()->builtins()->KeyedStoreIC_Slow();
4175  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4176
4177  // Miss case, call the runtime.
4178  __ bind(&miss_force_generic);
4179
4180  // ---------- S t a t e --------------
4181  //  -- ra     : return address
4182  //  -- a0     : value
4183  //  -- a1     : key
4184  //  -- a2     : receiver
4185  // -----------------------------------
4185
4186  Handle<Code> miss_ic =
4187      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4188  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4189}
4190
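// Illustrative sketch only; a hypothetical helper (not part of V8) that
// models the manual double -> integer truncation in the non-FPU store
// path above. 'hi' and 'lo' are the two words of an IEEE double. Zeros,
// denormals, NaNs and infinities become 0, and too-large magnitudes
// become the minimal value, matching the stub's conventions.
static int32_t TruncateDoubleBitsModel(uint32_t hi, uint32_t lo,
                                       bool is_signed_type) {
  int meaningful_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
  uint32_t min_value = is_signed_type ? 0x80000000u : 0x00000000u;
  uint32_t biased = (hi >> 20) & 0x7FF;
  if (biased == 0 || biased == 0x7FF) return 0;  // Special exponents.
  int exponent = static_cast<int>(biased) - 1023;  // Unbias.
  if (exponent < 0) return 0;  // Magnitude below 1 truncates to 0.
  if (exponent >= meaningful_bits - 1) {
    return static_cast<int32_t>(min_value);  // Out of range.
  }
  // Reassemble the integer part: the implicit 1 plus the mantissa bits.
  uint32_t mantissa = (hi & 0x000FFFFFu) | (1u << 20);
  uint32_t result = (exponent <= 20)
      ? mantissa >> (20 - exponent)
      : (mantissa << (exponent - 20)) | (lo >> (32 - (exponent - 20)));
  if ((hi & 0x80000000u) != 0) result = 0u - result;  // Apply the sign.
  return static_cast<int32_t>(result);
}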
4191
4192void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4193  // ----------- S t a t e -------------
4194  //  -- ra    : return address
4195  //  -- a0    : key
4196  //  -- a1    : receiver
4197  // -----------------------------------
4198  Label miss_force_generic;
4199
4200  // This stub is meant to be tail-jumped to; the receiver must already
4201  // have been verified by the caller not to be a smi.
4202
4203  // Check that the key is a smi.
4204  __ JumpIfNotSmi(a0, &miss_force_generic);
4205
4206  // Get the elements array.
4207  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
4208  __ AssertFastElements(a2);
4209
4210  // Check that the key is within bounds.
4211  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
4212  __ Branch(&miss_force_generic, hs, a0, Operand(a3));
4213
4214  // Load the result and make sure it's not the hole.
4215  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4216  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4217  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
4218  __ Addu(t0, t0, a3);
4219  __ lw(t0, MemOperand(t0));
4220  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4221  __ Branch(&miss_force_generic, eq, t0, Operand(t1));
4222  __ mov(v0, t0);
4223  __ Ret();
4224
4225  __ bind(&miss_force_generic);
4226  Code* stub = masm->isolate()->builtins()->builtin(
4227      Builtins::kKeyedLoadIC_MissForceGeneric);
4228  __ Jump(Handle<Code>(stub), RelocInfo::CODE_TARGET);
4229}
4230
4231
4232void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
4233    MacroAssembler* masm) {
4234  // ----------- S t a t e -------------
4235  //  -- ra    : return address
4236  //  -- a0    : key
4237  //  -- a1    : receiver
4238  // -----------------------------------
4239  Label miss_force_generic, slow_allocate_heapnumber;
4240
4241  Register key_reg = a0;
4242  Register receiver_reg = a1;
4243  Register elements_reg = a2;
4244  Register heap_number_reg = a2;
4245  Register indexed_double_offset = a3;
4246  Register scratch = t0;
4247  Register scratch2 = t1;
4248  Register scratch3 = t2;
4249  Register heap_number_map = t3;
4250
4251  // This stub is meant to be tail-jumped to; the receiver must already
4252  // have been verified by the caller not to be a smi.
4253
4254  // Check that the key is a smi.
4255  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4256
4257  // Get the elements array.
4258  __ lw(elements_reg,
4259        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4260
4261  // Check that the key is within bounds.
4262  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4263  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4264
4265  // Load the upper word of the double in the fixed array and test for NaN.
4266  __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
4267  __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
4268  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4269  __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4270  __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32));
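  // The hole is a NaN with a fixed bit pattern; since stores into double
  // arrays canonicalize other NaNs, no ordinary element ever carries the
  // hole's upper word, so checking the upper 32 bits alone suffices.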
4271
4272  // Non-NaN. Allocate a new heap number and copy the double value into it.
4273  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4274  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4275                        heap_number_map, &slow_allocate_heapnumber);
4276
4277  // Don't need to reload the upper 32 bits of the double, it's already in
4278  // scratch.
4279  __ sw(scratch, FieldMemOperand(heap_number_reg,
4280                                 HeapNumber::kExponentOffset));
4281  __ lw(scratch, FieldMemOperand(indexed_double_offset,
4282                                 FixedArray::kHeaderSize));
4283  __ sw(scratch, FieldMemOperand(heap_number_reg,
4284                                 HeapNumber::kMantissaOffset));
4285
4286  __ mov(v0, heap_number_reg);
4287  __ Ret();
4288
4289  __ bind(&slow_allocate_heapnumber);
4290  Handle<Code> slow_ic =
4291      masm->isolate()->builtins()->KeyedLoadIC_Slow();
4292  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4293
4294  __ bind(&miss_force_generic);
4295  Handle<Code> miss_ic =
4296      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4297  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4298}
4299
4300
4301void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
4302                                                      bool is_js_array) {
4303  // ----------- S t a t e -------------
4304  //  -- a0    : value
4305  //  -- a1    : key
4306  //  -- a2    : receiver
4307  //  -- ra    : return address
4308  //  -- a3    : scratch
4309  //  -- t0    : scratch (elements)
4310  // -----------------------------------
4311  Label miss_force_generic;
4312
4313  Register value_reg = a0;
4314  Register key_reg = a1;
4315  Register receiver_reg = a2;
4316  Register scratch = a3;
4317  Register elements_reg = t0;
4318  Register scratch2 = t1;
4319  Register scratch3 = t2;
4320
4321  // This stub is meant to be tail-jumped to; the receiver must already
4322  // have been verified by the caller not to be a smi.
4323
4324  // Check that the key is a smi.
4325  __ JumpIfNotSmi(key_reg, &miss_force_generic);

  // Get the elements array and make sure it is a fast element array,
  // not copy-on-write ('cow').
  __ lw(elements_reg,
        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
  __ CheckMap(elements_reg,
              scratch,
              Heap::kFixedArrayMapRootIndex,
              &miss_force_generic,
              DONT_DO_SMI_CHECK);
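  // Note: copy-on-write arrays carry their own map (fixed_cow_array_map),
  // distinct from fixed_array_map, so the map check above rejects them and
  // the generic stub takes over; it knows how to copy the backing store
  // before the first write.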

  // Check that the key is within bounds.
  if (is_js_array) {
    __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis.
  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
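  // Note: for a JSArray the bound is the array's own length property, which
  // may be smaller than the capacity of its backing store; for other
  // receivers the backing store's length is the bound. The unsigned 'hs'
  // compare of the two smis also sends negative keys to the miss path.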

  __ Addu(scratch,
          elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(scratch3, scratch2, scratch);
  __ sw(value_reg, MemOperand(scratch3));
  __ RecordWrite(scratch, Operand(scratch2), receiver_reg, elements_reg);
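  // Note: the store above may create a pointer from the (possibly old-space)
  // elements array to a new-space object, so RecordWrite informs the GC of
  // the written slot (base register plus offset operand); receiver_reg and
  // elements_reg are clobbered as scratch registers in the process, which
  // is why only value_reg survives to the return below.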

  // value_reg (a0) is preserved.
  // Done.
  __ Ret();

  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
    MacroAssembler* masm,
    bool is_js_array) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : key
  //  -- a2    : receiver
  //  -- ra    : return address
  //  -- a3    : scratch
  //  -- t0    : scratch (elements_reg)
  //  -- t1    : scratch (mantissa_reg)
  //  -- t2    : scratch (exponent_reg)
  //  -- t3    : scratch4
  // -----------------------------------
  Label miss_force_generic, smi_value, is_nan, maybe_nan, have_double_value;

  Register value_reg = a0;
  Register key_reg = a1;
  Register receiver_reg = a2;
  Register scratch = a3;
  Register elements_reg = t0;
  Register mantissa_reg = t1;
  Register exponent_reg = t2;
  Register scratch4 = t3;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
  __ JumpIfNotSmi(key_reg, &miss_force_generic);

  __ lw(elements_reg,
        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));

  // Check that the key is within bounds.
  if (is_js_array) {
    __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
  } else {
    __ lw(scratch,
          FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
  }
  // Compare smis; the unsigned compare catches both negative and
  // out-of-bounds indexes.
  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));

  // Handle smi values specially.
  __ JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number.
  __ CheckMap(value_reg,
              scratch,
              masm->isolate()->factory()->heap_number_map(),
              &miss_force_generic,
              DONT_DO_SMI_CHECK);

  // Check for NaN or Infinity: both have an upper word greater than or
  // equal (signed) to 0x7FF00000; anything below is an ordinary double.
  __ li(scratch, Operand(kNaNOrInfinityLowerBoundUpper32));
  __ lw(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
  __ Branch(&maybe_nan, ge, exponent_reg, Operand(scratch));

  __ lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));

  __ bind(&have_double_value);
  __ sll(scratch4, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  __ Addu(scratch, elements_reg, Operand(scratch4));
  __ sw(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize));
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ sw(exponent_reg, FieldMemOperand(scratch, offset));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, value_reg);  // In delay slot.
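  // Note: on MIPS the instruction after a jump (the branch delay slot)
  // executes before control actually transfers, so Ret(USE_DELAY_SLOT)
  // emits the return jump without a trailing nop and lets the mov above
  // set the result register for free.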

  __ bind(&maybe_nan);
  // Could be NaN or Infinity. If the fraction is not zero, it's NaN;
  // otherwise it's Infinity and the non-NaN code path applies.
  __ li(scratch, Operand(kNaNOrInfinityLowerBoundUpper32));
  __ Branch(&is_nan, gt, exponent_reg, Operand(scratch));
  __ lw(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
  __ Branch(&have_double_value, eq, mantissa_reg, Operand(zero_reg));

  __ bind(&is_nan);
  // Load canonical NaN for storing into the double array.
  uint64_t nan_int64 = BitCast<uint64_t>(
      FixedDoubleArray::canonical_not_the_hole_nan_as_double());
  __ li(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64)));
  __ li(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32)));
  __ jmp(&have_double_value);
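  // Note: NaN payloads must not be stored verbatim because the hole in a
  // fast double array is itself represented as a particular NaN bit
  // pattern; collapsing every NaN to
  // canonical_not_the_hole_nan_as_double() keeps stored NaNs
  // distinguishable from holes.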

  __ bind(&smi_value);
  __ Addu(scratch, elements_reg,
          Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
  __ sll(scratch4, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  __ Addu(scratch, scratch, scratch4);
  // scratch is now the effective address of the double element.

  FloatingPointHelper::Destination destination;
  if (CpuFeatures::IsSupported(FPU)) {
    destination = FloatingPointHelper::kFPURegisters;
  } else {
    destination = FloatingPointHelper::kCoreRegisters;
  }
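  // Note: the int-to-double conversion can be produced either by the FPU
  // (the result lands in an FPU register and is stored with a single sdc1)
  // or, when no FPU is available, in software via a pair of core registers
  // holding the mantissa and exponent words.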

  Register untagged_value = receiver_reg;
  __ SmiUntag(untagged_value, value_reg);
  FloatingPointHelper::ConvertIntToDouble(
      masm,
      untagged_value,
      destination,
      f0,
      mantissa_reg,
      exponent_reg,
      scratch4,
      f2);
  if (destination == FloatingPointHelper::kFPURegisters) {
    CpuFeatures::Scope scope(FPU);
    __ sdc1(f0, MemOperand(scratch, 0));
  } else {
    __ sw(mantissa_reg, MemOperand(scratch, 0));
    __ sw(exponent_reg, MemOperand(scratch, Register::kSizeInBytes));
  }
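  // Note: the core-register path writes the mantissa word at offset 0 and
  // the exponent word at offset 4, which presumes the little-endian double
  // layout this port targets; sdc1 on the FPU path stores the same 64 bits
  // in one instruction.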
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, value_reg);  // In delay slot.

  // Handle store cache miss, replacing the ic with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ Jump(ic, RelocInfo::CODE_TARGET);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS