// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


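// Probe one table of the stub cache. Entries are {key, value} pairs of
// 8 bytes each (see the ASSERTs on the relative key/value addresses
// below); the 'offset' register arrives holding index * 4, so a single
// extra left shift scales it to the entry stride. A rough sketch of the
// generated lookup, in C++-like pseudocode:
//
//   if (keys[index] != name) goto miss;
//   Code* code = values[index];
//   if ((code->flags() & ~Code::kFlagsNotUsedInLookup) != flags)
//     goto miss;
//   goto code_instructions;  // Jump past the Code object header.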
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register offsets_base_addr = scratch;

  // Check that the key in the entry matches the name.
  __ li(offsets_base_addr, Operand(key_offset));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));
  __ Branch(&miss, ne, name, Operand(scratch2));

  // Get the code entry from the cache.
  __ Addu(offsets_base_addr, offsets_base_addr,
          Operand(value_off_addr - key_off_addr));
  __ sll(scratch2, offset, 1);
  __ addu(scratch2, offsets_base_addr, scratch2);
  __ lw(scratch2, MemOperand(scratch2));

  // Check that the flags match what we're looking for.
  __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
  __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, scratch2, Operand(flags));

  // Re-load code entry from cache.
  __ sll(offset, offset, 1);
  __ addu(offset, offset, offsets_base_addr);
  __ lw(offset, MemOperand(offset));

  // Jump to the first instruction in the code stub.
  __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(offset);

  // Miss: fall through.
  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<String> name,
                                             Register scratch0,
                                             Register scratch1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                     miss_label,
                                                     &done,
                                                     receiver,
                                                     properties,
                                                     name,
                                                     scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


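// Probe both tables of the stub cache for a {map, name, flags} triple.
// A sketch of the hash computation performed below, assuming 32-bit
// words (the masks keep each offset within its table and aligned to the
// entry size):
//
//   primary   = ((name->hash_field() + map) ^ flags) & primary_mask;
//   secondary = ((primary - name) + flags) & secondary_mask;
//
// where 'name' and 'map' in the arithmetic are the raw pointer values
// of the respective heap objects.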
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that the code is valid. The shifting code relies on the
  // entry size being 8.
  ASSERT(sizeof(Entry) == 8);

  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check that the scratch, extra and extra2 registers are valid.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss, t0);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
  __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, Operand(t8));
  __ Xor(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);

  // Primary miss: compute the hash for the secondary probe.
  __ Subu(scratch, scratch, Operand(name));
  __ Addu(scratch, scratch, Operand(flags));
  __ And(scratch,
         scratch,
         Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);

  // Cache miss: fall through and let the caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ lw(prototype,
        FieldMemOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
        FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check that we're still in the same context.
  __ lw(prototype, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  ASSERT(!prototype.is(at));
  __ li(at, isolate->global());
  __ Branch(miss, ne, prototype, Operand(at));
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->global_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly; otherwise the property is loaded from the
// properties fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            Handle<JSObject> holder,
                                            int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ lw(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ lw(dst, FieldMemOperand(dst, offset));
  }
}
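
// A worked example of the offset arithmetic above (illustrative numbers
// only): with inobject_properties() == 4 and instance_size() == 0x30,
// field index 2 becomes 2 - 4 == -2, i.e. an in-object load from offset
// 0x30 + (-2 * kPointerSize) == 0x28; field index 5 becomes a load from
// the properties FixedArray at kHeaderSize + 1 * kPointerSize.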


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}
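
// Note that this test relies on kStringTag being zero: for any string,
// instance_type & kIsNotStringMask == 0, so the branch to
// non_string_object is taken exactly for non-string heap objects.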


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string, leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
  __ Ret();

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

    // Unwrap the value and check if the wrapped value is a string.
    __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
    __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
    __ Ret();
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(v0, scratch1);
  __ Ret();
}


// Generate StoreField code; the value is passed in register a0.
// After executing the generated code, the receiver_reg and name_reg
// registers may be clobbered.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      int index,
                                      Handle<Map> transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // a0 : value.
  Label exit;
  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
              DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // The stub is never generated for non-global objects that require
  // access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform a map transition for the receiver if necessary.
  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  if (!transition.is_null()) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ li(t0, Operand(transition));
    __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(a0, FieldMemOperand(receiver_reg, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit, scratch);

    // Update the write barrier for the array address.
    // Pass the now unused name_reg as a scratch register.
    __ mov(name_reg, a0);
    __ RecordWriteField(receiver_reg,
                        offset,
                        name_reg,
                        scratch,
                        kRAHasNotBeenSaved,
                        kDontSaveFPRegs);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(a0, FieldMemOperand(scratch, offset));

    // Skip updating the write barrier if storing a smi.
    __ JumpIfSmi(a0, &exit);

    // Update the write barrier for the array address.
    // Ok to clobber receiver_reg and name_reg, since we return.
    __ mov(name_reg, a0);
    __ RecordWriteField(scratch,
                        offset,
                        name_reg,
                        receiver_reg,
                        kRAHasNotBeenSaved,
                        kDontSaveFPRegs);
  }

  // Return the value (register v0).
  __ bind(&exit);
  __ mov(v0, a0);
  __ Ret();
}


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);
}


static void GenerateCallFunction(MacroAssembler* masm,
                                 Handle<Object> object,
                                 const ParameterCount& arguments,
                                 Label* miss,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- a0: receiver
  //  -- a1: function to call
  // -----------------------------------
  // Check that the function really is a function.
  __ JumpIfSmi(a1, miss);
  __ GetObjectType(a1, a3, a3);
  __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
    __ sw(a3, MemOperand(sp, arguments.immediate() * kPointerSize));
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(a1, arguments, JUMP_FUNCTION, NullCallWrapper(), call_kind);
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
  __ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
  __ push(scratch);
}
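
// After PushInterceptorArguments, five words are on the stack, pushed
// in this order: name, interceptor info, receiver, holder, and the
// interceptor's data word. This matches the argument count of 5 used
// for the interceptor IC_Utility calls below.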


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
          masm->isolate());
  __ li(a0, Operand(5));
  __ li(a1, Operand(ref));

  CEntryStub stub(1);
  __ CallStub(&stub);
}


static const int kFastApiCallArguments = 3;
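
// From the stack pointer up, the three reserved slots hold the holder,
// the callee JS function, and the call data (see the state comment in
// GenerateFastApiDirectCall below).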


// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
                                       Register scratch) {
  ASSERT(Smi::FromInt(0) == 0);
  for (int i = 0; i < kFastApiCallArguments; i++) {
    __ push(zero_reg);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
  __ Drop(kFastApiCallArguments);
}


static void GenerateFastApiDirectCall(MacroAssembler* masm,
                                      const CallOptimization& optimization,
                                      int argc) {
  // ----------- S t a t e -------------
  //  -- sp[0]              : holder (set by CheckPrototypes)
  //  -- sp[4]              : callee JS function
  //  -- sp[8]              : call data
  //  -- sp[12]             : last JS argument
  //  -- ...
  //  -- sp[(argc + 3) * 4] : first JS argument
  //  -- sp[(argc + 4) * 4] : receiver
  // -----------------------------------
  // Get the function and set up the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(t1, function);
  __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));

  // Pass the additional arguments FastHandleApiCall expects.
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ li(a0, api_call_info);
    __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
  } else {
    __ li(t2, call_data);
  }

  // Store the JS function and call data.
  __ sw(t1, MemOperand(sp, 1 * kPointerSize));
  __ sw(t2, MemOperand(sp, 2 * kPointerSize));

  // a2 points to call data as expected by Arguments
  // (refer to layout above).
  __ Addu(a2, sp, Operand(2 * kPointerSize));

  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the first argument in a1 instead of a0. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  // a1 = v8::Arguments&
  // Arguments is built at sp + 1 (sp is a reserved spot for ra).
  __ Addu(a1, sp, kPointerSize);

  // v8::Arguments::implicit_args = data
  __ sw(a2, MemOperand(a1, 0 * kPointerSize));
  // v8::Arguments::values = last argument
  __ Addu(t0, a2, Operand(argc * kPointerSize));
  __ sw(t0, MemOperand(a1, 1 * kPointerSize));
  // v8::Arguments::length_ = argc
  __ li(t0, Operand(argc));
  __ sw(t0, MemOperand(a1, 2 * kPointerSize));
  // v8::Arguments::is_construct_call = 0
  __ sw(zero_reg, MemOperand(a1, 3 * kPointerSize));

  const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_API_CALL,
                        masm->isolate());
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<String> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);
    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<String> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());
    Counters* counters = masm->isolate()->counters();
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    __ IncrementCounter(counters->call_const_interceptor(), 1,
                        scratch1, scratch2);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
                          scratch1, scratch2);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from the receiver to the interceptor's holder
    // haven't changed and thus we can invoke the interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke the interceptor and, if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
                        &regular_invoke);

    // The interceptor returned nothing for this property.  Try to use the
    // cached constant function.

    // Check that the maps from the interceptor's holder to the constant
    // function's holder haven't changed and thus we can use the cached
    // constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature).  It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke the function.
    if (can_do_fast_api_call) {
      GenerateFastApiDirectCall(masm, optimization, arguments_.immediate());
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for the fast API call case: clean up the preallocated
    // space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm);
      __ Branch(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<String> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    // Call a runtime function to load the interceptor property.
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
          ExternalReference(
              IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
              masm->isolate()),
          5);
    // Restore the name_ register.
    __ pop(name_);
    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Register scratch,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);

      __ Push(holder, name_);
      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);
      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
    }
    // If the interceptor returns the no-result sentinel, call the constant
    // function.
    __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<String> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<JSGlobalPropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch,
        FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


// Calls GenerateCheckPropertyCell for each global object in the prototype
// chain from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<String> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}


// Convert the int passed in register ival to an IEEE 754 single precision
// floating point value and store it at memory location
// (dst + 4 * wordoffset). If an FPU is available, it is used for the
// conversion.
static void StoreIntAsFloat(MacroAssembler* masm,
                            Register dst,
                            Register wordoffset,
                            Register ival,
                            Register fval,
                            Register scratch1,
                            Register scratch2) {
  if (CpuFeatures::IsSupported(FPU)) {
    CpuFeatures::Scope scope(FPU);
    __ mtc1(ival, f0);
    __ cvt_s_w(f0, f0);
    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ swc1(f0, MemOperand(scratch1, 0));
  } else {
    // FPU is not available; do the conversion manually.

    Label not_special, done;
    // Move the sign bit from the source to the destination.  This works
    // because the sign bit of a binary32 value has the same position and
    // polarity as the 2's complement sign bit of an int.
    ASSERT(kBinary32SignMask == 0x80000000u);

    __ And(fval, ival, Operand(kBinary32SignMask));
    // Negate the value if it is negative.
    __ subu(scratch1, zero_reg, ival);
    __ movn(ival, scratch1, fval);

    // We have -1, 0 or 1, which we treat specially. Register ival contains
    // the absolute value: it is either equal to 1 (special case of -1 and 1),
    // greater than 1 (not a special case) or less than 1 (special case of 0).
    __ Branch(&not_special, gt, ival, Operand(1));

    // For 1 or -1 we need to or in the 0 exponent (biased).
    static const uint32_t exponent_word_for_1 =
        kBinary32ExponentBias << kBinary32ExponentShift;

    __ Xor(scratch1, ival, Operand(1));
    __ li(scratch2, exponent_word_for_1);
    __ or_(scratch2, fval, scratch2);
    __ movz(fval, scratch2, scratch1);  // Only if ival is equal to 1.
    __ Branch(&done);

    __ bind(&not_special);
    // Count leading zeros.
    // Gets the wrong answer for 0, but we already checked for that case above.
    Register zeros = scratch2;
    __ clz(zeros, ival);

    // Compute the exponent and or it into the exponent register.
    __ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
    __ subu(scratch1, scratch1, zeros);

    __ sll(scratch1, scratch1, kBinary32ExponentShift);
    __ or_(fval, fval, scratch1);

    // Shift up the source, chopping the top bit off.
    __ Addu(zeros, zeros, Operand(1));
    // This wouldn't work for 1 and -1 as the shift would be 32, which means 0.
    __ sllv(ival, ival, zeros);
    // Keep the top kBinary32MantissaBits (23) bits as the mantissa.
    __ srl(scratch1, ival, kBitsPerInt - kBinary32MantissaBits);
    __ or_(fval, fval, scratch1);

    __ bind(&done);

    __ sll(scratch1, wordoffset, 2);
    __ addu(scratch1, dst, scratch1);
    __ sw(fval, MemOperand(scratch1, 0));
  }
}
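
// A worked example of the non-FPU path (illustrative): ival == 5 gives
// clz == 29, so the biased exponent is (31 + 127) - 29 == 129 and the
// exponent bits are 129 << 23 == 0x40800000. Shifting 5 left by 30
// chops off the implicit leading 1, leaving 0x40000000; shifted right
// by 9 and OR'd in, the result is 0x40A00000, which is 5.0f in
// IEEE 754 binary32.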


// Convert an unsigned integer with the specified number of leading zeroes
// in its binary representation to an IEEE 754 double.
// The integer to convert is passed in register hiword.
// The resulting double is returned in registers hiword:loword.
// This function does not work correctly for 0.
static void GenerateUInt2Double(MacroAssembler* masm,
                                Register hiword,
                                Register loword,
                                Register scratch,
                                int leading_zeroes) {
  const int meaningful_bits = kBitsPerInt - leading_zeroes - 1;
  const int biased_exponent = HeapNumber::kExponentBias + meaningful_bits;

  const int mantissa_shift_for_hi_word =
      meaningful_bits - HeapNumber::kMantissaBitsInTopWord;

  const int mantissa_shift_for_lo_word =
      kBitsPerInt - mantissa_shift_for_hi_word;

  __ li(scratch, biased_exponent << HeapNumber::kExponentShift);
  if (mantissa_shift_for_hi_word > 0) {
    __ sll(loword, hiword, mantissa_shift_for_lo_word);
    __ srl(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  } else {
    __ mov(loword, zero_reg);
    __ sll(hiword, hiword, mantissa_shift_for_hi_word);
    __ or_(hiword, scratch, hiword);
  }

  // If the least significant bit of the biased exponent was not 1, it was
  // corrupted by the most significant bit of the mantissa, so we should
  // fix that.
  if (!(biased_exponent & 1)) {
    __ li(scratch, 1 << HeapNumber::kExponentShift);
    __ nor(scratch, scratch, scratch);
    __ and_(hiword, hiword, scratch);
  }
}
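
// A worked example (illustrative): with leading_zeroes == 0 and
// hiword == 0x80000000, meaningful_bits == 31 and the biased exponent
// is 1023 + 31 == 1054 (0x41E). The input's top bit lands on the least
// significant exponent bit and is cleared by the fix-up above, leaving
// hiword:loword == 0x41E00000:00000000, i.e. 2^31 as a double.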


#undef __
#define __ ACCESS_MASM(masm())


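// Walk the prototype chain from 'object' to 'holder', emitting map
// checks along the way, and return the register that holds the holder
// at the end of the walk. A rough outline of the generated checks, per
// prototype link:
//
//   if (current->map() != expected_map) goto miss;      // CheckMap
//   if (current is a global proxy) check access token;  // CheckAccessGlobalProxy
//   current = current->map()->prototype();              // or embedded constant
//
// Dictionary-mode, non-global prototypes instead get a negative
// property lookup via GenerateDictionaryNegativeLookup.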
Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<String> name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between the holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ sw(reg, MemOperand(sp));
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Handle<Map> current_map(current->map());
      __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
                  ALLOW_ELEMENT_TRANSITION_MAPS);
      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ li(reg, Operand(prototype));
      }
    }

    if (save_at_depth == depth) {
      __ sw(reg, MemOperand(sp));
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Log the check depth.
  LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
              DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform a security check for access to the global object.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
  if (holder->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}


void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     Handle<String> name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
  GenerateFastPropertyLoad(masm(), v0, reg, holder, index);
  __ Ret();
}


void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder,
                      scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ LoadHeapObject(v0, value);
  __ Ret();
}


void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss, scratch1);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of them, and store
  // pointers to them.
  __ push(receiver);
  __ mov(scratch2, sp);  // scratch2 = AccessorInfo::args_
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3, callback);
    __ lw(scratch3, FieldMemOperand(scratch3, AccessorInfo::kDataOffset));
  } else {
    __ li(scratch3, Handle<Object>(callback->data()));
  }
  __ Push(reg, scratch3, name_reg);
  __ mov(a2, scratch2);  // Saved in case scratch2 == a1.
  __ mov(a1, sp);  // a1 (first argument - see note below) = Handle<String>

  // NOTE: the O32 ABI requires a0 to hold a special pointer when returning a
  // struct from the function (which is currently the case). This means we pass
  // the arguments in a1-a2 instead of a0-a1. TryCallApiFunctionAndReturn
  // will handle setting up a0.

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm(), StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create an AccessorInfo instance on the stack above the exit frame with
  // scratch2 (internal::Object** args_) as the data.
  __ sw(a2, MemOperand(sp, kPointerSize));
  // a2 (second argument - see note above) = AccessorInfo&
  __ Addu(a2, sp, kPointerSize);

  const int kStackUnwindSpace = 4;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference ref =
      ExternalReference(&fun,
                        ExternalReference::DIRECT_GETTER_CALL,
                        masm()->isolate());
  __ CallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
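
// The four words unwound above (kStackUnwindSpace) correspond to the
// pushes at the top of GenerateLoadCallback: the AccessorInfo::args_
// entries (receiver, holder, callback data) plus the property name
// pushed below them.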
1185
1186
1187void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
1188                                           Handle<JSObject> interceptor_holder,
1189                                           LookupResult* lookup,
1190                                           Register receiver,
1191                                           Register name_reg,
1192                                           Register scratch1,
1193                                           Register scratch2,
1194                                           Register scratch3,
1195                                           Handle<String> name,
1196                                           Label* miss) {
1197  ASSERT(interceptor_holder->HasNamedInterceptor());
1198  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1199
1200  // Check that the receiver isn't a smi.
1201  __ JumpIfSmi(receiver, miss);
1202
1203  // So far the most popular follow ups for interceptor loads are FIELD
1204  // and CALLBACKS, so inline only them, other cases may be added
1205  // later.
1206  bool compile_followup_inline = false;
1207  if (lookup->IsFound() && lookup->IsCacheable()) {
1208    if (lookup->type() == FIELD) {
1209      compile_followup_inline = true;
1210    } else if (lookup->type() == CALLBACKS &&
1211        lookup->GetCallbackObject()->IsAccessorInfo()) {
1212      compile_followup_inline =
1213          AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
1214    }
1215  }
1216
1217  if (compile_followup_inline) {
1218    // Compile the interceptor call, followed by inline code to load the
1219    // property from further up the prototype chain if the call fails.
1220    // Check that the maps haven't changed.
1221    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1222                                          scratch1, scratch2, scratch3,
1223                                          name, miss);
1224    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1225
1226    // Save necessary data before invoking an interceptor.
1227    // Requires a frame to make GC aware of pushed pointers.
1228    {
1229      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1230      if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1231        // CALLBACKS case needs a receiver to be passed into C++ callback.
1232        __ Push(receiver, holder_reg, name_reg);
1233      } else {
1234        __ Push(holder_reg, name_reg);
1235      }
1236      // Invoke an interceptor.  Note: map checks from receiver to
1237      // interceptor's holder has been compiled before (see a caller
1238      // of this method).
1239      CompileCallLoadPropertyWithInterceptor(masm(),
1240                                             receiver,
1241                                             holder_reg,
1242                                             name_reg,
1243                                             interceptor_holder);
1244      // Check if interceptor provided a value for property.  If it's
1245      // the case, return immediately.
1246      Label interceptor_failed;
1247      __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
1248      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1));
1249      frame_scope.GenerateLeaveFrame();
1250      __ Ret();
1251
1252      __ bind(&interceptor_failed);
1253      __ pop(name_reg);
1254      __ pop(holder_reg);
1255      if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1256        __ pop(receiver);
1257      }
1258      // Leave the internal frame.
1259    }
1260    // Check that the maps from interceptor's holder to lookup's holder
1261    // haven't changed.  And load lookup's holder into |holder| register.
1262    if (*interceptor_holder != lookup->holder()) {
1263      holder_reg = CheckPrototypes(interceptor_holder,
1264                                   holder_reg,
1265                                   Handle<JSObject>(lookup->holder()),
1266                                   scratch1,
1267                                   scratch2,
1268                                   scratch3,
1269                                   name,
1270                                   miss);
1271    }
1272
1273    if (lookup->type() == FIELD) {
1274      // We found FIELD property in prototype chain of interceptor's holder.
1275      // Retrieve a field from field's holder.
1276      GenerateFastPropertyLoad(masm(), v0, holder_reg,
1277                               Handle<JSObject>(lookup->holder()),
1278                               lookup->GetFieldIndex());
1279      __ Ret();
1280    } else {
1281      // We found CALLBACKS property in prototype chain of interceptor's
1282      // holder.
1283      ASSERT(lookup->type() == CALLBACKS);
1284      Handle<AccessorInfo> callback(
1285          AccessorInfo::cast(lookup->GetCallbackObject()));
1286      ASSERT(callback->getter() != NULL);
1287
1288      // Tail call to runtime.
1289      // Important invariant in CALLBACKS case: the code above must be
1290      // structured to never clobber |receiver| register.
1291      __ li(scratch2, callback);
1292      // holder_reg is either receiver or scratch1.
1293      if (!receiver.is(holder_reg)) {
1294        ASSERT(scratch1.is(holder_reg));
1295        __ Push(receiver, holder_reg);
1296        __ lw(scratch3,
1297              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1298        __ Push(scratch3, scratch2, name_reg);
1299      } else {
1300        __ push(receiver);
1301        __ lw(scratch3,
1302              FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
1303        __ Push(holder_reg, scratch3, scratch2, name_reg);
1304      }
1305
1306      ExternalReference ref =
1307          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1308                            masm()->isolate());
1309      __ TailCallExternalReference(ref, 5, 1);
1310    }
1311  } else {  // !compile_followup_inline
1312    // Call the runtime system to load the interceptor.
1313    // Check that the maps haven't changed.
1314    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1315                                          scratch1, scratch2, scratch3,
1316                                          name, miss);
1317    PushInterceptorArguments(masm(), receiver, holder_reg,
1318                             name_reg, interceptor_holder);
1319
1320    ExternalReference ref = ExternalReference(
1321        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
1322    __ TailCallExternalReference(ref, 5, 1);
1323  }
1324}
1325
1326
1327void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
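  // Non-keyed call ICs are compiled for a fixed name, so no runtime check is
  // needed. Keyed call ICs receive the name dynamically in a2 and must verify
  // that it matches the name this stub was compiled for.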
1328  if (kind_ == Code::KEYED_CALL_IC) {
1329    __ Branch(miss, ne, a2, Operand(name));
1330  }
1331}
1332
1333
1334void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
1335                                                   Handle<JSObject> holder,
1336                                                   Handle<String> name,
1337                                                   Label* miss) {
1338  ASSERT(holder->IsGlobalObject());
1339
1340  // Get the number of arguments.
1341  const int argc = arguments().immediate();
1342
1343  // Get the receiver from the stack.
1344  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1345
1346  // If the object is the holder then we know that it's a global
1347  // object which can only happen for contextual calls. In this case,
1348  // the receiver cannot be a smi.
1349  if (!object.is_identical_to(holder)) {
1350    __ JumpIfSmi(a0, miss);
1351  }
1352
1353  // Check that the maps haven't changed.
1354  CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
1355}
1356
1357
1358void CallStubCompiler::GenerateLoadFunctionFromCell(
1359    Handle<JSGlobalPropertyCell> cell,
1360    Handle<JSFunction> function,
1361    Label* miss) {
1362  // Get the value from the cell.
1363  __ li(a3, Operand(cell));
1364  __ lw(a1, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
1365
1366  // Check that the cell contains the same function.
1367  if (heap()->InNewSpace(*function)) {
1368    // We can't embed a pointer to a function in new space so we have
1369    // to verify that the shared function info is unchanged. This has
1370    // the nice side effect that multiple closures based on the same
1371    // function can all use this call IC. Before we load through the
1372    // function, we have to verify that it still is a function.
1373    __ JumpIfSmi(a1, miss);
1374    __ GetObjectType(a1, a3, a3);
1375    __ Branch(miss, ne, a3, Operand(JS_FUNCTION_TYPE));
1376
1377    // Check the shared function info. Make sure it hasn't changed.
1378    __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1379    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1380    __ Branch(miss, ne, t0, Operand(a3));
1381  } else {
1382    __ Branch(miss, ne, a1, Operand(function));
1383  }
1384}
1385
1386
1387void CallStubCompiler::GenerateMissBranch() {
1388  Handle<Code> code =
1389      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1390                                               kind_,
1391                                               extra_state_);
1392  __ Jump(code, RelocInfo::CODE_TARGET);
1393}
1394
1395
1396Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1397                                                Handle<JSObject> holder,
1398                                                int index,
1399                                                Handle<String> name) {
1400  // ----------- S t a t e -------------
1401  //  -- a2    : name
1402  //  -- ra    : return address
1403  // -----------------------------------
1404  Label miss;
1405
1406  GenerateNameCheck(name, &miss);
1407
1408  const int argc = arguments().immediate();
1409
1410  // Get the receiver of the function from the stack into a0.
1411  __ lw(a0, MemOperand(sp, argc * kPointerSize));
1412  // Check that the receiver isn't a smi.
1413  __ JumpIfSmi(a0, &miss, t0);
1414
1415  // Check the prototype chain and compute the holder register.
1416  Register reg = CheckPrototypes(object, a0, holder, a1, a3, t0, name, &miss);
1417  GenerateFastPropertyLoad(masm(), a1, reg, holder, index);
1418
1419  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
1420
1421  // Handle call cache miss.
1422  __ bind(&miss);
1423  GenerateMissBranch();
1424
1425  // Return the generated code.
1426  return GetCode(FIELD, name);
1427}
1428
1429
1430Handle<Code> CallStubCompiler::CompileArrayPushCall(
1431    Handle<Object> object,
1432    Handle<JSObject> holder,
1433    Handle<JSGlobalPropertyCell> cell,
1434    Handle<JSFunction> function,
1435    Handle<String> name) {
1436  // ----------- S t a t e -------------
1437  //  -- a2    : name
1438  //  -- ra    : return address
1439  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1440  //  -- ...
1441  //  -- sp[argc * 4]           : receiver
1442  // -----------------------------------
1443
1444  // If object is not an array or a cell is present, bail out to regular call.
1445  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1446
1447  Label miss;
1448
1449  GenerateNameCheck(name, &miss);
1450
1451  Register receiver = a1;
1452
1453  // Get the receiver from the stack.
1454  const int argc = arguments().immediate();
1455  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1456
1457  // Check that the receiver isn't a smi.
1458  __ JumpIfSmi(receiver, &miss);
1459
1460  // Check that the maps haven't changed.
1461  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, a3, v0, t0,
1462                  name, &miss);
1463
1464  if (argc == 0) {
1465    // Nothing to do, just return the length.
1466    __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1467    __ Drop(argc + 1);
1468    __ Ret();
1469  } else {
1470    Label call_builtin;
1471    Register elements = a3;
1472    Register end_elements = t1;
1473    // Get the elements array of the object.
1474    __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1475
1476    // Check that the elements are in fast mode and writable.
1477    __ CheckMap(elements,
1478                v0,
1479                Heap::kFixedArrayMapRootIndex,
1480                &call_builtin,
1481                DONT_DO_SMI_CHECK);
1482
1483    if (argc == 1) {  // Otherwise fall through to call the builtin.
1484      Label attempt_to_grow_elements;
1485
1486      // Get the array's length into v0 and calculate new length.
1487      __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1488      STATIC_ASSERT(kSmiTagSize == 1);
1489      STATIC_ASSERT(kSmiTag == 0);
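      // Smis are integers shifted left by the tag size with a zero tag, so
      // the Addu below increments the length directly on the tagged value.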
1490      __ Addu(v0, v0, Operand(Smi::FromInt(argc)));
1491
1492      // Get the elements array's length.
1493      __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1494
1495      // Check if the new length fits in the current backing store.
1496      __ Branch(&attempt_to_grow_elements, gt, v0, Operand(t0));
1497
1498      // Check if value is a smi.
1499      Label with_write_barrier;
1500      __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1501      __ JumpIfNotSmi(t0, &with_write_barrier);
1502
1503      // Save new length.
1504      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1505
1506      // Push the element.
1507      // We may need a register containing the address end_elements below,
1508      // so write back the value in end_elements.
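      // v0 holds the new length as a smi; shifting it left by
      // kPointerSizeLog2 - kSmiTagSize turns it into a byte offset into the
      // elements array.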
1509      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1510      __ Addu(end_elements, elements, end_elements);
1511      const int kEndElementsOffset =
1512          FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1513      __ Addu(end_elements, end_elements, kEndElementsOffset);
1514      __ sw(t0, MemOperand(end_elements));
1515
1516      // The stored value is a smi, so no write barrier is needed.
1517      __ Drop(argc + 1);
1518      __ Ret();
1519
1520      __ bind(&with_write_barrier);
1521
1522      __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
1523      __ CheckFastObjectElements(t2, t2, &call_builtin);
1524
1525      // Save new length.
1526      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1527
1528      // Push the element.
1529      // We may need a register containing the address end_elements below,
1530      // so write back the value in end_elements.
1531      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1532      __ Addu(end_elements, elements, end_elements);
1533      __ Addu(end_elements, end_elements, kEndElementsOffset);
1534      __ sw(t0, MemOperand(end_elements));
1535
1536      __ RecordWrite(elements,
1537                     end_elements,
1538                     t0,
1539                     kRAHasNotBeenSaved,
1540                     kDontSaveFPRegs,
1541                     EMIT_REMEMBERED_SET,
1542                     OMIT_SMI_CHECK);
1543      __ Drop(argc + 1);
1544      __ Ret();
1545
1546      __ bind(&attempt_to_grow_elements);
1547      // v0: array's length + 1.
1548      // t0: elements' length.
1549
1550      if (!FLAG_inline_new) {
1551        __ Branch(&call_builtin);
1552      }
1553
1554      __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
1555      // Growing elements that are SMI-only requires special handling in case
1556      // the new element is non-Smi. For now, delegate to the builtin.
1557      Label no_fast_elements_check;
1558      __ JumpIfSmi(a2, &no_fast_elements_check);
1559      __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1560      __ CheckFastObjectElements(t3, t3, &call_builtin);
1561      __ bind(&no_fast_elements_check);
1562
1563      ExternalReference new_space_allocation_top =
1564          ExternalReference::new_space_allocation_top_address(
1565              masm()->isolate());
1566      ExternalReference new_space_allocation_limit =
1567          ExternalReference::new_space_allocation_limit_address(
1568              masm()->isolate());
1569
1570      const int kAllocationDelta = 4;
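      // Grow by four words: the first new slot receives the pushed argument
      // and the remaining three are pre-filled with holes below.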
1571      // Load top and check if it is the end of elements.
1572      __ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
1573      __ Addu(end_elements, elements, end_elements);
1574      __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1575      __ li(t3, Operand(new_space_allocation_top));
1576      __ lw(t2, MemOperand(t3));
1577      __ Branch(&call_builtin, ne, end_elements, Operand(t2));
1578
1579      __ li(t5, Operand(new_space_allocation_limit));
1580      __ lw(t5, MemOperand(t5));
1581      __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
1582      __ Branch(&call_builtin, hi, t2, Operand(t5));
1583
1584      // The grown elements fit below the allocation limit; grow in place.
1585      // Update new_space_allocation_top.
1586      __ sw(t2, MemOperand(t3));
1587      // Push the argument.
1588      __ sw(a2, MemOperand(end_elements));
1589      // Fill the rest with holes.
1590      __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1591      for (int i = 1; i < kAllocationDelta; i++) {
1592        __ sw(t2, MemOperand(end_elements, i * kPointerSize));
1593      }
1594
1595      // Update elements' and array's sizes.
1596      __ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1597      __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1598      __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1599
1600      // Elements are in new space, so write barrier is not required.
1601      __ Drop(argc + 1);
1602      __ Ret();
1603    }
1604    __ bind(&call_builtin);
1605    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1606                                                   masm()->isolate()),
1607                                 argc + 1,
1608                                 1);
1609  }
1610
1611  // Handle call cache miss.
1612  __ bind(&miss);
1613  GenerateMissBranch();
1614
1615  // Return the generated code.
1616  return GetCode(function);
1617}
1618
1619
1620Handle<Code> CallStubCompiler::CompileArrayPopCall(
1621    Handle<Object> object,
1622    Handle<JSObject> holder,
1623    Handle<JSGlobalPropertyCell> cell,
1624    Handle<JSFunction> function,
1625    Handle<String> name) {
1626  // ----------- S t a t e -------------
1627  //  -- a2    : name
1628  //  -- ra    : return address
1629  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1630  //  -- ...
1631  //  -- sp[argc * 4]           : receiver
1632  // -----------------------------------
1633
1634  // If object is not an array, bail out to regular call.
1635  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
1636
1637  Label miss, return_undefined, call_builtin;
1638  Register receiver = a1;
1639  Register elements = a3;
1640  GenerateNameCheck(name, &miss);
1641
1642  // Get the receiver from the stack.
1643  const int argc = arguments().immediate();
1644  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1645  // Check that the receiver isn't a smi.
1646  __ JumpIfSmi(receiver, &miss);
1647
1648  // Check that the maps haven't changed.
1649  CheckPrototypes(Handle<JSObject>::cast(object), receiver, holder, elements,
1650                  t0, v0, name, &miss);
1651
1652  // Get the elements array of the object.
1653  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1654
1655  // Check that the elements are in fast mode and writable.
1656  __ CheckMap(elements,
1657              v0,
1658              Heap::kFixedArrayMapRootIndex,
1659              &call_builtin,
1660              DONT_DO_SMI_CHECK);
1661
1662  // Get the array's length into t0 and calculate new length.
1663  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1664  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1665  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1666
1667  // Get the last element.
1668  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1669  STATIC_ASSERT(kSmiTagSize == 1);
1670  STATIC_ASSERT(kSmiTag == 0);
1671  // We can't address the last element in one operation. Compute the more
1672  // expensive shift first, and use an offset later on.
1673  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1674  __ Addu(elements, elements, t1);
1675  __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1676  __ Branch(&call_builtin, eq, v0, Operand(t2));
1677
1678  // Set the array's length.
1679  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1680
1681  // Fill with the hole.
1682  __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
1683  __ Drop(argc + 1);
1684  __ Ret();
1685
1686  __ bind(&return_undefined);
1687  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1688  __ Drop(argc + 1);
1689  __ Ret();
1690
1691  __ bind(&call_builtin);
1692  __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
1693                                                 masm()->isolate()),
1694                               argc + 1,
1695                               1);
1696
1697  // Handle call cache miss.
1698  __ bind(&miss);
1699  GenerateMissBranch();
1700
1701  // Return the generated code.
1702  return GetCode(function);
1703}
1704
1705
1706Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1707    Handle<Object> object,
1708    Handle<JSObject> holder,
1709    Handle<JSGlobalPropertyCell> cell,
1710    Handle<JSFunction> function,
1711    Handle<String> name) {
1712  // ----------- S t a t e -------------
1713  //  -- a2                     : function name
1714  //  -- ra                     : return address
1715  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1716  //  -- ...
1717  //  -- sp[argc * 4]           : receiver
1718  // -----------------------------------
1719
1720  // If object is not a string, bail out to regular call.
1721  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1722
1723  const int argc = arguments().immediate();
1724  Label miss;
1725  Label name_miss;
1726  Label index_out_of_range;
1727
1728  Label* index_out_of_range_label = &index_out_of_range;
1729
1730  if (kind_ == Code::CALL_IC &&
1731      (CallICBase::StringStubState::decode(extra_state_) ==
1732       DEFAULT_STRING_STUB)) {
1733    index_out_of_range_label = &miss;
1734  }
1735
1736  GenerateNameCheck(name, &name_miss);
1737
1738  // Check that the maps starting from the prototype haven't changed.
1739  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1740                                            Context::STRING_FUNCTION_INDEX,
1741                                            v0,
1742                                            &miss);
1743  ASSERT(!object.is_identical_to(holder));
1744  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1745                  v0, holder, a1, a3, t0, name, &miss);
1746
1747  Register receiver = a1;
1748  Register index = t1;
1749  Register result = v0;
1750  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1751  if (argc > 0) {
1752    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1753  } else {
1754    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1755  }
1756
1757  StringCharCodeAtGenerator generator(receiver,
1758                                      index,
1759                                      result,
1760                                      &miss,  // When not a string.
1761                                      &miss,  // When not a number.
1762                                      index_out_of_range_label,
1763                                      STRING_INDEX_IS_NUMBER);
1764  generator.GenerateFast(masm());
1765  __ Drop(argc + 1);
1766  __ Ret();
1767
1768  StubRuntimeCallHelper call_helper;
1769  generator.GenerateSlow(masm(), call_helper);
1770
1771  if (index_out_of_range.is_linked()) {
1772    __ bind(&index_out_of_range);
1773    __ LoadRoot(v0, Heap::kNanValueRootIndex);
1774    __ Drop(argc + 1);
1775    __ Ret();
1776  }
1777
1778  __ bind(&miss);
1779  // Restore function name in a2.
1780  __ li(a2, name);
1781  __ bind(&name_miss);
1782  GenerateMissBranch();
1783
1784  // Return the generated code.
1785  return GetCode(function);
1786}
1787
1788
1789Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1790    Handle<Object> object,
1791    Handle<JSObject> holder,
1792    Handle<JSGlobalPropertyCell> cell,
1793    Handle<JSFunction> function,
1794    Handle<String> name) {
1795  // ----------- S t a t e -------------
1796  //  -- a2                     : function name
1797  //  -- ra                     : return address
1798  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1799  //  -- ...
1800  //  -- sp[argc * 4]           : receiver
1801  // -----------------------------------
1802
1803  // If object is not a string or a cell is present, bail out to regular call.
1804  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1805
1806  const int argc = arguments().immediate();
1807  Label miss;
1808  Label name_miss;
1809  Label index_out_of_range;
1810  Label* index_out_of_range_label = &index_out_of_range;
1811  if (kind_ == Code::CALL_IC &&
1812      (CallICBase::StringStubState::decode(extra_state_) ==
1813       DEFAULT_STRING_STUB)) {
1814    index_out_of_range_label = &miss;
1815  }
1816  GenerateNameCheck(name, &name_miss);
1817
1818  // Check that the maps starting from the prototype haven't changed.
1819  GenerateDirectLoadGlobalFunctionPrototype(masm(),
1820                                            Context::STRING_FUNCTION_INDEX,
1821                                            v0,
1822                                            &miss);
1823  ASSERT(!object.is_identical_to(holder));
1824  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
1825                  v0, holder, a1, a3, t0, name, &miss);
1826
1827  Register receiver = v0;
1828  Register index = t1;
1829  Register scratch = a3;
1830  Register result = v0;
1831  __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1832  if (argc > 0) {
1833    __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1834  } else {
1835    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1836  }
1837
1838  StringCharAtGenerator generator(receiver,
1839                                  index,
1840                                  scratch,
1841                                  result,
1842                                  &miss,  // When not a string.
1843                                  &miss,  // When not a number.
1844                                  index_out_of_range_label,
1845                                  STRING_INDEX_IS_NUMBER);
1846  generator.GenerateFast(masm());
1847  __ Drop(argc + 1);
1848  __ Ret();
1849
1850  StubRuntimeCallHelper call_helper;
1851  generator.GenerateSlow(masm(), call_helper);
1852
1853  if (index_out_of_range.is_linked()) {
1854    __ bind(&index_out_of_range);
1855    __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
1856    __ Drop(argc + 1);
1857    __ Ret();
1858  }
1859
1860  __ bind(&miss);
1861  // Restore function name in a2.
1862  __ li(a2, name);
1863  __ bind(&name_miss);
1864  GenerateMissBranch();
1865
1866  // Return the generated code.
1867  return GetCode(function);
1868}
1869
1870
1871Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
1872    Handle<Object> object,
1873    Handle<JSObject> holder,
1874    Handle<JSGlobalPropertyCell> cell,
1875    Handle<JSFunction> function,
1876    Handle<String> name) {
1877  // ----------- S t a t e -------------
1878  //  -- a2                     : function name
1879  //  -- ra                     : return address
1880  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1881  //  -- ...
1882  //  -- sp[argc * 4]           : receiver
1883  // -----------------------------------
1884
1885  const int argc = arguments().immediate();
1886
1887  // If the object is not a JSObject or we got an unexpected number of
1888  // arguments, bail out to the regular call.
1889  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1890
1891  Label miss;
1892  GenerateNameCheck(name, &miss);
1893
1894  if (cell.is_null()) {
1895    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1896
1897    STATIC_ASSERT(kSmiTag == 0);
1898    __ JumpIfSmi(a1, &miss);
1899
1900    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
1901                    name, &miss);
1902  } else {
1903    ASSERT(cell->value() == *function);
1904    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1905                                &miss);
1906    GenerateLoadFunctionFromCell(cell, function, &miss);
1907  }
1908
1909  // Load the char code argument.
1910  Register code = a1;
1911  __ lw(code, MemOperand(sp, 0 * kPointerSize));
1912
1913  // Check that the char code is a smi.
1914  Label slow;
1915  STATIC_ASSERT(kSmiTag == 0);
1916  __ JumpIfNotSmi(code, &slow);
1917
1918  // Convert the smi code to uint16.
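  // Smi::FromInt(0xffff) is 0xffff shifted up by the tag size, so the AND
  // truncates the payload to 16 bits while the result remains a valid smi.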
1919  __ And(code, code, Operand(Smi::FromInt(0xffff)));
1920
1921  StringCharFromCodeGenerator generator(code, v0);
1922  generator.GenerateFast(masm());
1923  __ Drop(argc + 1);
1924  __ Ret();
1925
1926  StubRuntimeCallHelper call_helper;
1927  generator.GenerateSlow(masm(), call_helper);
1928
1929  // Tail call the full function. We do not have to patch the receiver
1930  // because the function makes no use of it.
1931  __ bind(&slow);
1932  __ InvokeFunction(
1933      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
1934
1935  __ bind(&miss);
1936  // a2: function name.
1937  GenerateMissBranch();
1938
1939  // Return the generated code.
1940  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
1941}
1942
1943
1944Handle<Code> CallStubCompiler::CompileMathFloorCall(
1945    Handle<Object> object,
1946    Handle<JSObject> holder,
1947    Handle<JSGlobalPropertyCell> cell,
1948    Handle<JSFunction> function,
1949    Handle<String> name) {
1950  // ----------- S t a t e -------------
1951  //  -- a2                     : function name
1952  //  -- ra                     : return address
1953  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
1954  //  -- ...
1955  //  -- sp[argc * 4]           : receiver
1956  // -----------------------------------
1957
1958  if (!CpuFeatures::IsSupported(FPU)) {
1959    return Handle<Code>::null();
1960  }
1961
1962  CpuFeatures::Scope scope_fpu(FPU);
1963  const int argc = arguments().immediate();
1964  // If the object is not a JSObject or we got an unexpected number of
1965  // arguments, bail out to the regular call.
1966  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1967
1968  Label miss, slow;
1969  GenerateNameCheck(name, &miss);
1970
1971  if (cell.is_null()) {
1972    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1973    STATIC_ASSERT(kSmiTag == 0);
1974    __ JumpIfSmi(a1, &miss);
1975    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
1976                    name, &miss);
1977  } else {
1978    ASSERT(cell->value() == *function);
1979    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1980                                &miss);
1981    GenerateLoadFunctionFromCell(cell, function, &miss);
1982  }
1983
1984  // Load the (only) argument into v0.
1985  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
1986
1987  // If the argument is a smi, just return.
1988  STATIC_ASSERT(kSmiTag == 0);
1989  __ And(t0, v0, Operand(kSmiTagMask));
1990  __ Drop(argc + 1, eq, t0, Operand(zero_reg));
1991  __ Ret(eq, t0, Operand(zero_reg));
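  // The Drop and Ret above are conditional: they only take effect when the
  // tag bits in t0 are zero, i.e. when the argument is a smi. Heap objects
  // fall through to the HeapNumber map check below.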
1992
1993  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
1994
1995  Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
1996
1997  // If fpu is enabled, we use the floor instruction.
1998
1999  // Load the HeapNumber value.
2000  __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2001
2002  // Backup FCSR.
2003  __ cfc1(a3, FCSR);
2004  // Clearing FCSR clears the exception mask with no side-effects.
2005  __ ctc1(zero_reg, FCSR);
2006  // Convert the argument to an integer.
2007  __ floor_w_d(f0, f0);
2008
2009  // Start checking for special cases.
2010  // Get the argument exponent and clear the sign bit.
2011  __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2012  __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2013  __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
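  // t2 now holds the IEEE biased exponent of the argument.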
2014
2015  // Retrieve FCSR and check for fpu errors.
2016  __ cfc1(t5, FCSR);
2017  __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
2018  __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2019
2020  // Check for NaN, Infinity, and -Infinity.
2021  // They are invariant through a Math.floor call, so just
2022  // return the original argument.
2023  __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2024        >> HeapNumber::kMantissaBitsInTopWord));
2025  __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2026  // The conversion overflowed or underflowed. Check whether the exponent
2027  // is big enough that the value cannot have a fractional part.
2028  // If so, the argument is already an integer and still in v0.
2029  __ Branch(&restore_fcsr_and_return, ge, t3,
2030      Operand(HeapNumber::kMantissaBits));
2031  __ Branch(&wont_fit_smi);
2032
2033  __ bind(&no_fpu_error);
2034  // Move the result back to v0.
2035  __ mfc1(v0, f0);
2036  // Check if the result fits into a smi.
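  // Adding 0x40000000 makes a1 negative exactly when v0 lies outside the
  // smi range [-2^30, 2^30 - 1].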
2037  __ Addu(a1, v0, Operand(0x40000000));
2038  __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
2039  // Tag the result.
2040  STATIC_ASSERT(kSmiTag == 0);
2041  __ sll(v0, v0, kSmiTagSize);
2042
2043  // Check for -0.
2044  __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2045  // t1 already holds the HeapNumber exponent.
2046  __ And(t0, t1, Operand(HeapNumber::kSignMask));
2047  // If the HeapNumber is negative, the argument was -0: reload it from
2048  // the stack and return it. Otherwise v0 already holds smi zero, so just
  // return.
2049  __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2050  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2051
2052  __ bind(&restore_fcsr_and_return);
2053  // Restore FCSR and return.
2054  __ ctc1(a3, FCSR);
2055
2056  __ Drop(argc + 1);
2057  __ Ret();
2058
2059  __ bind(&wont_fit_smi);
2060  // Restore FCSR and fall through to the slow case.
2061  __ ctc1(a3, FCSR);
2062
2063  __ bind(&slow);
2064  // Tail call the full function. We do not have to patch the receiver
2065  // because the function makes no use of it.
2066  __ InvokeFunction(
2067      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2068
2069  __ bind(&miss);
2070  // a2: function name.
2071  GenerateMissBranch();
2072
2073  // Return the generated code.
2074  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2075}
2076
2077
2078Handle<Code> CallStubCompiler::CompileMathAbsCall(
2079    Handle<Object> object,
2080    Handle<JSObject> holder,
2081    Handle<JSGlobalPropertyCell> cell,
2082    Handle<JSFunction> function,
2083    Handle<String> name) {
2084  // ----------- S t a t e -------------
2085  //  -- a2                     : function name
2086  //  -- ra                     : return address
2087  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
2088  //  -- ...
2089  //  -- sp[argc * 4]           : receiver
2090  // -----------------------------------
2091
2092  const int argc = arguments().immediate();
2093  // If the object is not a JSObject or we got an unexpected number of
2094  // arguments, bail out to the regular call.
2095  if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2096
2097  Label miss;
2098
2099  GenerateNameCheck(name, &miss);
2100  if (cell.is_null()) {
2101    __ lw(a1, MemOperand(sp, 1 * kPointerSize));
2102    STATIC_ASSERT(kSmiTag == 0);
2103    __ JumpIfSmi(a1, &miss);
2104    CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, v0, a3, t0,
2105                    name, &miss);
2106  } else {
2107    ASSERT(cell->value() == *function);
2108    GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
2109                                &miss);
2110    GenerateLoadFunctionFromCell(cell, function, &miss);
2111  }
2112
2113  // Load the (only) argument into v0.
2114  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2115
2116  // Check if the argument is a smi.
2117  Label not_smi;
2118  STATIC_ASSERT(kSmiTag == 0);
2119  __ JumpIfNotSmi(v0, &not_smi);
2120
2121  // Do bitwise not or do nothing depending on the sign of the
2122  // argument.
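  // t0 is 0 for a non-negative smi and -1 for a negative one, so the xor
  // and subtraction below compute the absolute value without branching;
  // the result stays a valid smi because the tag bit participates in the
  // arithmetic.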
2123  __ sra(t0, v0, kBitsPerInt - 1);
2124  __ Xor(a1, v0, t0);
2125
2126  // Add 1 or do nothing depending on the sign of the argument.
2127  __ Subu(v0, a1, t0);
2128
2129  // If the result is still negative, go to the slow case.
2130  // This only happens for the most negative smi.
2131  Label slow;
2132  __ Branch(&slow, lt, v0, Operand(zero_reg));
2133
2134  // Smi case done.
2135  __ Drop(argc + 1);
2136  __ Ret();
2137
2138  // Check if the argument is a heap number and load its exponent and
2139  // sign.
2140  __ bind(&not_smi);
2141  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2142  __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2143
2144  // Check the sign of the argument. If the argument is positive,
2145  // just return it.
2146  Label negative_sign;
2147  __ And(t0, a1, Operand(HeapNumber::kSignMask));
2148  __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2149  __ Drop(argc + 1);
2150  __ Ret();
2151
2152  // If the argument is negative, clear the sign, and return a new
2153  // number.
2154  __ bind(&negative_sign);
2155  __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2156  __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2157  __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2158  __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2159  __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2160  __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2161  __ Drop(argc + 1);
2162  __ Ret();
2163
2164  // Tail call the full function. We do not have to patch the receiver
2165  // because the function makes no use of it.
2166  __ bind(&slow);
2167  __ InvokeFunction(
2168      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
2169
2170  __ bind(&miss);
2171  // a2: function name.
2172  GenerateMissBranch();
2173
2174  // Return the generated code.
2175  return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
2176}
2177
2178
2179Handle<Code> CallStubCompiler::CompileFastApiCall(
2180    const CallOptimization& optimization,
2181    Handle<Object> object,
2182    Handle<JSObject> holder,
2183    Handle<JSGlobalPropertyCell> cell,
2184    Handle<JSFunction> function,
2185    Handle<String> name) {
2186
2187  Counters* counters = isolate()->counters();
2188
2189  ASSERT(optimization.is_simple_api_call());
2190  // Bail out if object is a global object, as we don't want to
2191  // repatch it to the global receiver.
2192  if (object->IsGlobalObject()) return Handle<Code>::null();
2193  if (!cell.is_null()) return Handle<Code>::null();
2194  if (!object->IsJSObject()) return Handle<Code>::null();
2195  int depth = optimization.GetPrototypeDepthOfExpectedType(
2196      Handle<JSObject>::cast(object), holder);
2197  if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2198
2199  Label miss, miss_before_stack_reserved;
2200
2201  GenerateNameCheck(name, &miss_before_stack_reserved);
2202
2203  // Get the receiver from the stack.
2204  const int argc = arguments().immediate();
2205  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2206
2207  // Check that the receiver isn't a smi.
2208  __ JumpIfSmi(a1, &miss_before_stack_reserved);
2209
2210  __ IncrementCounter(counters->call_const(), 1, a0, a3);
2211  __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2212
2213  ReserveSpaceForFastApiCall(masm(), a0);
2214
2215  // Check that the maps haven't changed and find the holder as a side effect.
2216  CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0, name,
2217                  depth, &miss);
2218
2219  GenerateFastApiDirectCall(masm(), optimization, argc);
2220
2221  __ bind(&miss);
2222  FreeSpaceForFastApiCall(masm());
2223
2224  __ bind(&miss_before_stack_reserved);
2225  GenerateMissBranch();
2226
2227  // Return the generated code.
2228  return GetCode(function);
2229}
2230
2231
2232Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
2233                                                   Handle<JSObject> holder,
2234                                                   Handle<JSFunction> function,
2235                                                   Handle<String> name,
2236                                                   CheckType check) {
2237  // ----------- S t a t e -------------
2238  //  -- a2    : name
2239  //  -- ra    : return address
2240  // -----------------------------------
2241  if (HasCustomCallGenerator(function)) {
2242    Handle<Code> code = CompileCustomCall(object, holder,
2243                                          Handle<JSGlobalPropertyCell>::null(),
2244                                          function, name);
2245    // A null handle means bail out to the regular compiler code below.
2246    if (!code.is_null()) return code;
2247  }
2248
2249  Label miss;
2250
2251  GenerateNameCheck(name, &miss);
2252
2253  // Get the receiver from the stack.
2254  const int argc = arguments().immediate();
2255  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2256
2257  // Check that the receiver isn't a smi.
2258  if (check != NUMBER_CHECK) {
2259    __ JumpIfSmi(a1, &miss);
2260  }
2261
2262  // Make sure that it's okay not to patch the on-stack receiver
2263  // unless we're doing a receiver map check.
2264  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2265  switch (check) {
2266    case RECEIVER_MAP_CHECK:
2267      __ IncrementCounter(masm()->isolate()->counters()->call_const(),
2268          1, a0, a3);
2269
2270      // Check that the maps haven't changed.
2271      CheckPrototypes(Handle<JSObject>::cast(object), a1, holder, a0, a3, t0,
2272                      name, &miss);
2273
2274      // Patch the receiver on the stack with the global proxy if
2275      // necessary.
2276      if (object->IsGlobalObject()) {
2277        __ lw(a3, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2278        __ sw(a3, MemOperand(sp, argc * kPointerSize));
2279      }
2280      break;
2281
2282    case STRING_CHECK:
2283      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2284        // Check that the object is a string or a symbol.
2285        __ GetObjectType(a1, a3, a3);
2286        __ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2287        // Check that the maps starting from the prototype haven't changed.
2288        GenerateDirectLoadGlobalFunctionPrototype(
2289            masm(), Context::STRING_FUNCTION_INDEX, a0, &miss);
2290        CheckPrototypes(
2291            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2292            a0, holder, a3, a1, t0, name, &miss);
2293      } else {
2294        // Calling non-strict non-builtins with a value as the receiver
2295        // requires boxing.
2296        __ jmp(&miss);
2297      }
2298      break;
2299
2300    case NUMBER_CHECK:
2301      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2302        Label fast;
2303        // Check that the object is a smi or a heap number.
2304        __ JumpIfSmi(a1, &fast);
2305        __ GetObjectType(a1, a0, a0);
2306        __ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
2307        __ bind(&fast);
2308        // Check that the maps starting from the prototype haven't changed.
2309        GenerateDirectLoadGlobalFunctionPrototype(
2310            masm(), Context::NUMBER_FUNCTION_INDEX, a0, &miss);
2311        CheckPrototypes(
2312            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2313            a0, holder, a3, a1, t0, name, &miss);
2314      } else {
2315        // Calling non-strict non-builtins with a value as the receiver
2316        // requires boxing.
2317        __ jmp(&miss);
2318      }
2319      break;
2320
2321    case BOOLEAN_CHECK:
2322      if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
2323        Label fast;
2324        // Check that the object is a boolean.
2325        __ LoadRoot(t0, Heap::kTrueValueRootIndex);
2326        __ Branch(&fast, eq, a1, Operand(t0));
2327        __ LoadRoot(t0, Heap::kFalseValueRootIndex);
2328        __ Branch(&miss, ne, a1, Operand(t0));
2329        __ bind(&fast);
2330        // Check that the maps starting from the prototype haven't changed.
2331        GenerateDirectLoadGlobalFunctionPrototype(
2332            masm(), Context::BOOLEAN_FUNCTION_INDEX, a0, &miss);
2333        CheckPrototypes(
2334            Handle<JSObject>(JSObject::cast(object->GetPrototype())),
2335            a0, holder, a3, a1, t0, name, &miss);
2336      } else {
2337        // Calling non-strict non-builtins with a value as the receiver
2338        // requires boxing.
2339        __ jmp(&miss);
2340      }
2341      break;
2342  }
2343
2344  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2345      ? CALL_AS_FUNCTION
2346      : CALL_AS_METHOD;
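  // Contextual calls are plain function calls, e.g. foo(), where the name
  // was looked up in the current scope; they are invoked CALL_AS_FUNCTION,
  // while property calls such as o.foo() are invoked CALL_AS_METHOD.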
2347  __ InvokeFunction(
2348      function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
2349
2350  // Handle call cache miss.
2351  __ bind(&miss);
2352
2353  GenerateMissBranch();
2354
2355  // Return the generated code.
2356  return GetCode(function);
2357}
2358
2359
2360Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2361                                                      Handle<JSObject> holder,
2362                                                      Handle<String> name) {
2363  // ----------- S t a t e -------------
2364  //  -- a2    : name
2365  //  -- ra    : return address
2366  // -----------------------------------
2367
2368  Label miss;
2369
2370  GenerateNameCheck(name, &miss);
2371
2372  // Get the number of arguments.
2373  const int argc = arguments().immediate();
2374  LookupResult lookup(isolate());
2375  LookupPostInterceptor(holder, name, &lookup);
2376
2377  // Get the receiver from the stack.
2378  __ lw(a1, MemOperand(sp, argc * kPointerSize));
2379
2380  CallInterceptorCompiler compiler(this, arguments(), a2, extra_state_);
2381  compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
2382                   &miss);
2383
2384  // Move returned value, the function to call, to a1.
2385  __ mov(a1, v0);
2386  // Restore receiver.
2387  __ lw(a0, MemOperand(sp, argc * kPointerSize));
2388
2389  GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_);
2390
2391  // Handle call cache miss.
2392  __ bind(&miss);
2393  GenerateMissBranch();
2394
2395  // Return the generated code.
2396  return GetCode(INTERCEPTOR, name);
2397}
2398
2399
2400Handle<Code> CallStubCompiler::CompileCallGlobal(
2401    Handle<JSObject> object,
2402    Handle<GlobalObject> holder,
2403    Handle<JSGlobalPropertyCell> cell,
2404    Handle<JSFunction> function,
2405    Handle<String> name) {
2406  // ----------- S t a t e -------------
2407  //  -- a2    : name
2408  //  -- ra    : return address
2409  // -----------------------------------
2410
2411  if (HasCustomCallGenerator(function)) {
2412    Handle<Code> code = CompileCustomCall(object, holder, cell, function, name);
2413    // A null handle means bail out to the regular compiler code below.
2414    if (!code.is_null()) return code;
2415  }
2416
2417  Label miss;
2418  GenerateNameCheck(name, &miss);
2419
2420  // Get the number of arguments.
2421  const int argc = arguments().immediate();
2422  GenerateGlobalReceiverCheck(object, holder, name, &miss);
2423  GenerateLoadFunctionFromCell(cell, function, &miss);
2424
2425  // Patch the receiver on the stack with the global proxy if
2426  // necessary.
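  // a0 holds the receiver loaded by GenerateGlobalReceiverCheck, and a1
  // holds the function loaded by GenerateLoadFunctionFromCell.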
2427  if (object->IsGlobalObject()) {
2428    __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2429    __ sw(a3, MemOperand(sp, argc * kPointerSize));
2430  }
2431
2432  // Set up the context (function already in a1).
2433  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2434
2435  // Jump to the cached code (tail call).
2436  Counters* counters = masm()->isolate()->counters();
2437  __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2438  ParameterCount expected(function->shared()->formal_parameter_count());
2439  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
2440      ? CALL_AS_FUNCTION
2441      : CALL_AS_METHOD;
2442  // We call indirectly through the code field in the function to
2443  // allow recompilation to take effect without changing any of the
2444  // call sites.
2445  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
2446  __ InvokeCode(a3, expected, arguments(), JUMP_FUNCTION,
2447                NullCallWrapper(), call_kind);
2448
2449  // Handle call cache miss.
2450  __ bind(&miss);
2451  __ IncrementCounter(counters->call_global_inline_miss(), 1, a1, a3);
2452  GenerateMissBranch();
2453
2454  // Return the generated code.
2455  return GetCode(NORMAL, name);
2456}
2457
2458
2459Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2460                                                  int index,
2461                                                  Handle<Map> transition,
2462                                                  Handle<String> name) {
2463  // ----------- S t a t e -------------
2464  //  -- a0    : value
2465  //  -- a1    : receiver
2466  //  -- a2    : name
2467  //  -- ra    : return address
2468  // -----------------------------------
2469  Label miss;
2470
2471  // Name register might be clobbered.
2472  GenerateStoreField(masm(), object, index, transition, a1, a2, a3, &miss);
2473  __ bind(&miss);
2474  __ li(a2, Operand(Handle<String>(name)));  // Restore name.
2475  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2476  __ Jump(ic, RelocInfo::CODE_TARGET);
2477
2478  // Return the generated code.
2479  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
2480}
2481
2482
2483Handle<Code> StoreStubCompiler::CompileStoreCallback(
2484    Handle<JSObject> object,
2485    Handle<AccessorInfo> callback,
2486    Handle<String> name) {
2487  // ----------- S t a t e -------------
2488  //  -- a0    : value
2489  //  -- a1    : receiver
2490  //  -- a2    : name
2491  //  -- ra    : return address
2492  // -----------------------------------
2493  Label miss;
2494
2495  // Check that the map of the object hasn't changed.
2496  __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
2497              DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2498
2499  // Perform global security token check if needed.
2500  if (object->IsJSGlobalProxy()) {
2501    __ CheckAccessGlobalProxy(a1, a3, &miss);
2502  }
2503
2504  // The stub is never generated for non-global objects that require
2505  // access checks.
2506  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2507
2508  __ push(a1);  // Receiver.
2509  __ li(a3, Operand(callback));  // Callback info.
2510  __ Push(a3, a2, a0);
2511
2512  // Do tail-call to the runtime system.
2513  ExternalReference store_callback_property =
2514      ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
2515          masm()->isolate());
2516  __ TailCallExternalReference(store_callback_property, 4, 1);
2517
2518  // Handle store cache miss.
2519  __ bind(&miss);
2520  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2521  __ Jump(ic, RelocInfo::CODE_TARGET);
2522
2523  // Return the generated code.
2524  return GetCode(CALLBACKS, name);
2525}
2526
2527
2528Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
2529    Handle<JSObject> receiver,
2530    Handle<String> name) {
2531  // ----------- S t a t e -------------
2532  //  -- a0    : value
2533  //  -- a1    : receiver
2534  //  -- a2    : name
2535  //  -- ra    : return address
2536  // -----------------------------------
2537  Label miss;
2538
2539  // Check that the map of the object hasn't changed.
2540  __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,
2541              DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
2542
2543  // Perform global security token check if needed.
2544  if (receiver->IsJSGlobalProxy()) {
2545    __ CheckAccessGlobalProxy(a1, a3, &miss);
2546  }
2547
2548  // Stub is never generated for non-global objects that require access
2549  // checks.
2550  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2551
2552  __ Push(a1, a2, a0);  // Receiver, name, value.
2553
2554  __ li(a0, Operand(Smi::FromInt(strict_mode_)));
2555  __ push(a0);  // Strict mode.
2556
2557  // Do tail-call to the runtime system.
2558  ExternalReference store_ic_property =
2559      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
2560          masm()->isolate());
2561  __ TailCallExternalReference(store_ic_property, 4, 1);
2562
2563  // Handle store cache miss.
2564  __ bind(&miss);
2565  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2566  __ Jump(ic, RelocInfo::CODE_TARGET);
2567
2568  // Return the generated code.
2569  return GetCode(INTERCEPTOR, name);
2570}
2571
2572
2573Handle<Code> StoreStubCompiler::CompileStoreGlobal(
2574    Handle<GlobalObject> object,
2575    Handle<JSGlobalPropertyCell> cell,
2576    Handle<String> name) {
2577  // ----------- S t a t e -------------
2578  //  -- a0    : value
2579  //  -- a1    : receiver
2580  //  -- a2    : name
2581  //  -- ra    : return address
2582  // -----------------------------------
2583  Label miss;
2584
2585  // Check that the map of the global has not changed.
2586  __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
2587  __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
2588
2589  // Check that the value in the cell is not the hole. If it is, this
2590  // cell could have been deleted, and reintroducing the global would
2591  // require updating the property details in the global object's
2592  // property dictionary. We bail out to the runtime system to do that.
2593  __ li(t0, Operand(cell));
2594  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2595  __ lw(t2, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2596  __ Branch(&miss, eq, t1, Operand(t2));
2597
2598  // Store the value in the cell.
2599  __ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
2600  __ mov(v0, a0);  // Stored value must be returned in v0.
2601  // Cells are always rescanned, so no write barrier here.
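  // JSGlobalPropertyCells are allocated in the dedicated cell space, which
  // the garbage collector scans in full on every collection.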
2602
2603  Counters* counters = masm()->isolate()->counters();
2604  __ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
2605  __ Ret();
2606
2607  // Handle store cache miss.
2608  __ bind(&miss);
2609  __ IncrementCounter(counters->named_store_global_inline_miss(), 1, a1, a3);
2610  Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
2611  __ Jump(ic, RelocInfo::CODE_TARGET);
2612
2613  // Return the generated code.
2614  return GetCode(NORMAL, name);
2615}
2616
2617
2618Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
2619                                                      Handle<JSObject> object,
2620                                                      Handle<JSObject> last) {
2621  // ----------- S t a t e -------------
2622  //  -- a0    : receiver
2623  //  -- ra    : return address
2624  // -----------------------------------
2625  Label miss;
2626
2627  // Check that the receiver is not a smi.
2628  __ JumpIfSmi(a0, &miss);
2629
2630  // Check the maps of the full prototype chain.
2631  CheckPrototypes(object, a0, last, a3, a1, t0, name, &miss);
2632
2633  // If the last object in the prototype chain is a global object,
2634  // check that the global property cell is empty.
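  // An empty cell guarantees that the property has not been introduced on
  // the global object since this stub was compiled; global properties are
  // added by filling a cell, not by changing the map.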
2635  if (last->IsGlobalObject()) {
2636    GenerateCheckPropertyCell(
2637        masm(), Handle<GlobalObject>::cast(last), name, a1, &miss);
2638  }
2639
2640  // Return undefined if the maps of the full prototype chain are unchanged.
2641  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2642  __ Ret();
2643
2644  __ bind(&miss);
2645  GenerateLoadMiss(masm(), Code::LOAD_IC);
2646
2647  // Return the generated code.
2648  return GetCode(NONEXISTENT, factory()->empty_string());
2649}
2650
2651
2652Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
2653                                                Handle<JSObject> holder,
2654                                                int index,
2655                                                Handle<String> name) {
2656  // ----------- S t a t e -------------
2657  //  -- a0    : receiver
2658  //  -- a2    : name
2659  //  -- ra    : return address
2660  // -----------------------------------
2661  Label miss;
2662
2663  __ mov(v0, a0);
2664
2665  GenerateLoadField(object, holder, v0, a3, a1, t0, index, name, &miss);
2666  __ bind(&miss);
2667  GenerateLoadMiss(masm(), Code::LOAD_IC);
2668
2669  // Return the generated code.
2670  return GetCode(FIELD, name);
2671}
2672
2673
2674Handle<Code> LoadStubCompiler::CompileLoadCallback(
2675    Handle<String> name,
2676    Handle<JSObject> object,
2677    Handle<JSObject> holder,
2678    Handle<AccessorInfo> callback) {
2679  // ----------- S t a t e -------------
2680  //  -- a0    : receiver
2681  //  -- a2    : name
2682  //  -- ra    : return address
2683  // -----------------------------------
2684  Label miss;
2685  GenerateLoadCallback(object, holder, a0, a2, a3, a1, t0, callback, name,
2686                       &miss);
2687  __ bind(&miss);
2688  GenerateLoadMiss(masm(), Code::LOAD_IC);
2689
2690  // Return the generated code.
2691  return GetCode(CALLBACKS, name);
2692}
2693
2694
2695Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
2696                                                   Handle<JSObject> holder,
2697                                                   Handle<JSFunction> value,
2698                                                   Handle<String> name) {
2699  // ----------- S t a t e -------------
2700  //  -- a0    : receiver
2701  //  -- a2    : name
2702  //  -- ra    : return address
2703  // -----------------------------------
2704  Label miss;
2705
2706  GenerateLoadConstant(object, holder, a0, a3, a1, t0, value, name, &miss);
2707  __ bind(&miss);
2708  GenerateLoadMiss(masm(), Code::LOAD_IC);
2709
2710  // Return the generated code.
2711  return GetCode(CONSTANT_FUNCTION, name);
2712}
2713
2714
2715Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> object,
2716                                                      Handle<JSObject> holder,
2717                                                      Handle<String> name) {
2718  // ----------- S t a t e -------------
2719  //  -- a0    : receiver
2720  //  -- a2    : name
2721  //  -- ra    : return address
2722  //  -- [sp]  : receiver
2723  // -----------------------------------
2724  Label miss;
2725
2726  LookupResult lookup(isolate());
2727  LookupPostInterceptor(holder, name, &lookup);
2728  GenerateLoadInterceptor(object, holder, &lookup, a0, a2, a3, a1, t0, name,
2729                          &miss);
2730  __ bind(&miss);
2731  GenerateLoadMiss(masm(), Code::LOAD_IC);
2732
2733  // Return the generated code.
2734  return GetCode(INTERCEPTOR, name);
2735}
2736
2737
2738Handle<Code> LoadStubCompiler::CompileLoadGlobal(
2739    Handle<JSObject> object,
2740    Handle<GlobalObject> holder,
2741    Handle<JSGlobalPropertyCell> cell,
2742    Handle<String> name,
2743    bool is_dont_delete) {
2744  // ----------- S t a t e -------------
2745  //  -- a0    : receiver
2746  //  -- a2    : name
2747  //  -- ra    : return address
2748  // -----------------------------------
2749  Label miss;
2750
2751  // If the object is the holder then we know that it's a global
2752  // object which can only happen for contextual calls. In this case,
2753  // the receiver cannot be a smi.
2754  if (!object.is_identical_to(holder)) {
2755    __ JumpIfSmi(a0, &miss);
2756  }
2757
2758  // Check that the map of the global has not changed.
2759  CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
2760
2761  // Get the value from the cell.
2762  __ li(a3, Operand(cell));
2763  __ lw(t0, FieldMemOperand(a3, JSGlobalPropertyCell::kValueOffset));
2764
2765  // Check for deleted property if property can actually be deleted.
2766  if (!is_dont_delete) {
2767    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2768    __ Branch(&miss, eq, t0, Operand(at));
2769  }
2770
2771  __ mov(v0, t0);
2772  Counters* counters = masm()->isolate()->counters();
2773  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
2774  __ Ret();
2775
2776  __ bind(&miss);
2777  __ IncrementCounter(counters->named_load_global_stub_miss(), 1, a1, a3);
2778  GenerateLoadMiss(masm(), Code::LOAD_IC);
2779
2780  // Return the generated code.
2781  return GetCode(NORMAL, name);
2782}
2783
2784
2785Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
2786                                                     Handle<JSObject> receiver,
2787                                                     Handle<JSObject> holder,
2788                                                     int index) {
2789  // ----------- S t a t e -------------
2790  //  -- ra    : return address
2791  //  -- a0    : key
2792  //  -- a1    : receiver
2793  // -----------------------------------
2794  Label miss;
2795
2796  // Check the key is the cached one.
2797  __ Branch(&miss, ne, a0, Operand(name));
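  // Keyed load stubs are specialized for a single property name; any other
  // key falls through to the generic miss handler.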
2798
2799  GenerateLoadField(receiver, holder, a1, a2, a3, t0, index, name, &miss);
2800  __ bind(&miss);
2801  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2802
2803  return GetCode(FIELD, name);
2804}
2805
2806
2807Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
2808    Handle<String> name,
2809    Handle<JSObject> receiver,
2810    Handle<JSObject> holder,
2811    Handle<AccessorInfo> callback) {
2812  // ----------- S t a t e -------------
2813  //  -- ra    : return address
2814  //  -- a0    : key
2815  //  -- a1    : receiver
2816  // -----------------------------------
2817  Label miss;
2818
2819  // Check the key is the cached one.
2820  __ Branch(&miss, ne, a0, Operand(name));
2821
2822  GenerateLoadCallback(receiver, holder, a1, a0, a2, a3, t0, callback, name,
2823                       &miss);
2824  __ bind(&miss);
2825  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2826
2827  return GetCode(CALLBACKS, name);
2828}
2829
2830
2831Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
2832    Handle<String> name,
2833    Handle<JSObject> receiver,
2834    Handle<JSObject> holder,
2835    Handle<JSFunction> value) {
2836  // ----------- S t a t e -------------
2837  //  -- ra    : return address
2838  //  -- a0    : key
2839  //  -- a1    : receiver
2840  // -----------------------------------
2841  Label miss;
2842
2843  // Check the key is the cached one.
2844  __ Branch(&miss, ne, a0, Operand(name));
2845
2846  GenerateLoadConstant(receiver, holder, a1, a2, a3, t0, value, name, &miss);
2847  __ bind(&miss);
2848  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2849
2850  // Return the generated code.
2851  return GetCode(CONSTANT_FUNCTION, name);
2852}
2853
2854
2855Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
2856    Handle<JSObject> receiver,
2857    Handle<JSObject> holder,
2858    Handle<String> name) {
2859  // ----------- S t a t e -------------
2860  //  -- ra    : return address
2861  //  -- a0    : key
2862  //  -- a1    : receiver
2863  // -----------------------------------
2864  Label miss;
2865
2866  // Check the key is the cached one.
2867  __ Branch(&miss, ne, a0, Operand(name));
2868
2869  LookupResult lookup(isolate());
2870  LookupPostInterceptor(holder, name, &lookup);
2871  GenerateLoadInterceptor(receiver, holder, &lookup, a1, a0, a2, a3, t0, name,
2872                          &miss);
2873  __ bind(&miss);
2874  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2875
2876  return GetCode(INTERCEPTOR, name);
2877}
2878
2879
2880Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
2881    Handle<String> name) {
2882  // ----------- S t a t e -------------
2883  //  -- ra    : return address
2884  //  -- a0    : key
2885  //  -- a1    : receiver
2886  // -----------------------------------
2887  Label miss;
2888
2889  // Check the key is the cached one.
2890  __ Branch(&miss, ne, a0, Operand(name));
2891
2892  GenerateLoadArrayLength(masm(), a1, a2, &miss);
2893  __ bind(&miss);
2894  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2895
2896  return GetCode(CALLBACKS, name);
2897}
2898
2899
2900Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
2901    Handle<String> name) {
2902  // ----------- S t a t e -------------
2903  //  -- ra    : return address
2904  //  -- a0    : key
2905  //  -- a1    : receiver
2906  // -----------------------------------
2907  Label miss;
2908
2909  Counters* counters = masm()->isolate()->counters();
2910  __ IncrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
2911
2912  // Check the key is the cached one.
2913  __ Branch(&miss, ne, a0, Operand(name));
2914
2915  GenerateLoadStringLength(masm(), a1, a2, a3, &miss, true);
2916  __ bind(&miss);
2917  __ DecrementCounter(counters->keyed_load_string_length(), 1, a2, a3);
2918
2919  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2920
2921  return GetCode(CALLBACKS, name);
2922}
2923
2924
2925Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
2926    Handle<String> name) {
2927  // ----------- S t a t e -------------
2928  //  -- ra    : return address
2929  //  -- a0    : key
2930  //  -- a1    : receiver
2931  // -----------------------------------
2932  Label miss;
2933
2934  Counters* counters = masm()->isolate()->counters();
2935  __ IncrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
2936
2937  // Check the name hasn't changed.
2938  __ Branch(&miss, ne, a0, Operand(name));
2939
2940  GenerateLoadFunctionPrototype(masm(), a1, a2, a3, &miss);
2941  __ bind(&miss);
2942  __ DecrementCounter(counters->keyed_load_function_prototype(), 1, a2, a3);
2943  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2944
2945  return GetCode(CALLBACKS, name);
2946}
2947
2948
2949Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
2950    Handle<Map> receiver_map) {
2951  // ----------- S t a t e -------------
2952  //  -- ra    : return address
2953  //  -- a0    : key
2954  //  -- a1    : receiver
2955  // -----------------------------------
2956  ElementsKind elements_kind = receiver_map->elements_kind();
2957  Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
2958
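  // DispatchMap tail-jumps to the element stub when the receiver's map
  // matches receiver_map (after a smi check) and falls through otherwise.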
2959  __ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
2960
2961  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
2962  __ Jump(ic, RelocInfo::CODE_TARGET);
2963
2964  // Return the generated code.
2965  return GetCode(NORMAL, factory()->empty_string());
2966}
2967
2968
2969Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
2970    MapHandleList* receiver_maps,
2971    CodeHandleList* handler_ics) {
2972  // ----------- S t a t e -------------
2973  //  -- ra    : return address
2974  //  -- a0    : key
2975  //  -- a1    : receiver
2976  // -----------------------------------
2977  Label miss;
2978  __ JumpIfSmi(a1, &miss);
2979
2980  int receiver_count = receiver_maps->length();
2981  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
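  // Compare the receiver's map against each candidate map and tail-jump to
  // the matching handler IC; fall through to the miss handler if none match.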
2982  for (int current = 0; current < receiver_count; ++current) {
2983    __ Jump(handler_ics->at(current), RelocInfo::CODE_TARGET,
2984        eq, a2, Operand(receiver_maps->at(current)));
2985  }
2986
2987  __ bind(&miss);
2988  Handle<Code> miss_ic = isolate()->builtins()->KeyedLoadIC_Miss();
2989  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
2990
2991  // Return the generated code.
2992  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
2993}
2994
2995
2996Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
2997                                                       int index,
2998                                                       Handle<Map> transition,
2999                                                       Handle<String> name) {
3000  // ----------- S t a t e -------------
3001  //  -- a0    : value
3002  //  -- a1    : key
3003  //  -- a2    : receiver
3004  //  -- ra    : return address
3005  // -----------------------------------
3006
3007  Label miss;
3008
3009  Counters* counters = masm()->isolate()->counters();
3010  __ IncrementCounter(counters->keyed_store_field(), 1, a3, t0);
3011
3012  // Check that the name has not changed.
3013  __ Branch(&miss, ne, a1, Operand(name));
3014
3015  // a3 is used as a scratch register. a1 and a2 keep their values if a jump
3016  // to the miss label is generated.
3017  GenerateStoreField(masm(), object, index, transition, a2, a1, a3, &miss);
3018  __ bind(&miss);
3019
3020  __ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
3021  Handle<Code> ic = masm()->isolate()->builtins()->KeyedStoreIC_Miss();
3022  __ Jump(ic, RelocInfo::CODE_TARGET);
3023
3024  // Return the generated code.
3025  return GetCode(transition.is_null() ? FIELD : MAP_TRANSITION, name);
3026}
3027
3028
3029Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
3030    Handle<Map> receiver_map) {
3031  // ----------- S t a t e -------------
3032  //  -- a0    : value
3033  //  -- a1    : key
3034  //  -- a2    : receiver
3035  //  -- ra    : return address
3036  //  -- a3    : scratch
3037  // -----------------------------------
3038  ElementsKind elements_kind = receiver_map->elements_kind();
3039  bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
3040  Handle<Code> stub =
3041      KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
3042
3043  __ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK);
3044
3045  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
3046  __ Jump(ic, RelocInfo::CODE_TARGET);
3047
3048  // Return the generated code.
3049  return GetCode(NORMAL, factory()->empty_string());
3050}
3051
3052
3053Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
3054    MapHandleList* receiver_maps,
3055    CodeHandleList* handler_stubs,
3056    MapHandleList* transitioned_maps) {
3057  // ----------- S t a t e -------------
3058  //  -- a0    : value
3059  //  -- a1    : key
3060  //  -- a2    : receiver
3061  //  -- ra    : return address
3062  //  -- a3    : scratch
3063  // -----------------------------------
3064  Label miss;
3065  __ JumpIfSmi(a2, &miss);
3066
3067  int receiver_count = receiver_maps->length();
3068  __ lw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
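  // For each candidate map, either tail-jump straight to the handler stub or,
  // when an elements-kind transition is required first, load the transition
  // target map into a3 and jump to a handler that transitions before storing.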
3069  for (int i = 0; i < receiver_count; ++i) {
3070    if (transitioned_maps->at(i).is_null()) {
3071      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
3072          a3, Operand(receiver_maps->at(i)));
3073    } else {
3074      Label next_map;
3075      __ Branch(&next_map, ne, a3, Operand(receiver_maps->at(i)));
3076      __ li(a3, Operand(transitioned_maps->at(i)));
3077      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
3078      __ bind(&next_map);
3079    }
3080  }
3081
3082  __ bind(&miss);
3083  Handle<Code> miss_ic = isolate()->builtins()->KeyedStoreIC_Miss();
3084  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3085
3086  // Return the generated code.
3087  return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
3088}
3089
3090
3091Handle<Code> ConstructStubCompiler::CompileConstructStub(
3092    Handle<JSFunction> function) {
3093  // a0    : argc
3094  // a1    : constructor
3095  // ra    : return address
3096  // [sp]  : last argument
3097  Label generic_stub_call;
3098
3099  // Use t7 to hold undefined, which is used in several places below.
3100  __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
3101
3102#ifdef ENABLE_DEBUGGER_SUPPORT
3103  // Check to see whether there are any break points in the function code. If
3104  // there are, jump to the generic constructor stub, which calls the actual
3105  // code for the function, thereby hitting the break points.
3106  __ lw(t5, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
3107  __ lw(a2, FieldMemOperand(t5, SharedFunctionInfo::kDebugInfoOffset));
3108  __ Branch(&generic_stub_call, ne, a2, Operand(t7));
3109#endif
3110
3111  // Load the initial map and verify that it is in fact a map.
3112  // a1: constructor function
3113  // t7: undefined
3114  __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
3115  __ JumpIfSmi(a2, &generic_stub_call);
3116  __ GetObjectType(a2, a3, t0);
3117  __ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
3118
3119#ifdef DEBUG
3120  // Cannot construct functions this way.
3121  // a0: argc
3122  // a1: constructor function
3123  // a2: initial map
3124  // t7: undefined
3125  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3126  __ Check(ne, "Function constructed by construct stub.",
3127      a3, Operand(JS_FUNCTION_TYPE));
3128#endif
3129
3130  // Now allocate the JSObject in new space.
3131  // a0: argc
3132  // a1: constructor function
3133  // a2: initial map
3134  // t7: undefined
3135  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
3136  __ AllocateInNewSpace(a3, t4, t5, t6, &generic_stub_call, SIZE_IN_WORDS);
3137
3138  // The JSObject is allocated; now initialize the fields. The map is set to
3139  // the initial map; properties and elements are set to the empty fixed array.
3140  // a0: argc
3141  // a1: constructor function
3142  // a2: initial map
3143  // a3: object size (in words)
3144  // t4: JSObject (not tagged)
3145  // t7: undefined
3146  __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
3147  __ mov(t5, t4);
3148  __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
3149  __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
3150  __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
3151  __ Addu(t5, t5, Operand(3 * kPointerSize));
3152  ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
3153  ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
3154  ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
3155
3157  // Calculate the location of the first argument. The stack contains only the
3158  // argc arguments.
3159  __ sll(a1, a0, kPointerSizeLog2);
3160  __ Addu(a1, a1, sp);
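  // a1 now points just past the last argument (sp + argc * kPointerSize);
  // argument n is therefore found at a1 - (n + 1) * kPointerSize, which is
  // what the load in the loop below relies on.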
3161
3162  // Fill all the in-object properties with undefined.
3163  // a0: argc
3164  // a1: first argument
3165  // a3: object size (in words)
3166  // t4: JSObject (not tagged)
3167  // t5: First in-object property of JSObject (not tagged)
3168  // t7: undefined
3169  // Fill the initialized properties with a constant value or a passed argument
3170  // depending on the this.x = ...; assignment in the function.
3171  Handle<SharedFunctionInfo> shared(function->shared());
3172  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3173    if (shared->IsThisPropertyAssignmentArgument(i)) {
3174      Label not_passed, next;
3175      // Check if the argument assigned to the property is actually passed.
3176      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
3177      __ Branch(&not_passed, less_equal, a0, Operand(arg_number));
3178      // Argument passed - find it on the stack.
3179      __ lw(a2, MemOperand(a1, (arg_number + 1) * -kPointerSize));
3180      __ sw(a2, MemOperand(t5));
3181      __ Addu(t5, t5, kPointerSize);
3182      __ jmp(&next);
3183      __ bind(&not_passed);
3184      // Set the property to undefined.
3185      __ sw(t7, MemOperand(t5));
3186      __ Addu(t5, t5, Operand(kPointerSize));
3187      __ bind(&next);
3188    } else {
3189      // Set the property to the constant value.
3190      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3191      __ li(a2, Operand(constant));
3192      __ sw(a2, MemOperand(t5));
3193      __ Addu(t5, t5, kPointerSize);
3194    }
3195  }
3196
3197  // Fill the unused in-object property fields with undefined.
3198  ASSERT(function->has_initial_map());
3199  for (int i = shared->this_property_assignments_count();
3200       i < function->initial_map()->inobject_properties();
3201       i++) {
3202      __ sw(t7, MemOperand(t5));
3203      __ Addu(t5, t5, kPointerSize);
3204  }
3205
3206  // a0: argc
3207  // t4: JSObject (not tagged)
3208  // Move argc to a1 and the JSObject to return to v0, then tag it.
3209  __ mov(a1, a0);
3210  __ mov(v0, t4);
3211  __ Or(v0, v0, Operand(kHeapObjectTag));
3212
3213  // v0: JSObject
3214  // a1: argc
3215  // Remove caller arguments and receiver from the stack and return.
3216  __ sll(t0, a1, kPointerSizeLog2);
3217  __ Addu(sp, sp, t0);
3218  __ Addu(sp, sp, Operand(kPointerSize));
3219  Counters* counters = masm()->isolate()->counters();
3220  __ IncrementCounter(counters->constructed_objects(), 1, a1, a2);
3221  __ IncrementCounter(counters->constructed_objects_stub(), 1, a1, a2);
3222  __ Ret();
3223
3224  // Jump to the generic stub in case the specialized code cannot handle the
3225  // construction.
3226  __ bind(&generic_stub_call);
3227  Handle<Code> generic_construct_stub =
3228      masm()->isolate()->builtins()->JSConstructStubGeneric();
3229  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3230
3231  // Return the generated code.
3232  return GetCode();
3233}
3234
3235
3236#undef __
3237#define __ ACCESS_MASM(masm)
3238
3239
3240void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
3241    MacroAssembler* masm) {
3242  // ---------- S t a t e --------------
3243  //  -- ra     : return address
3244  //  -- a0     : key
3245  //  -- a1     : receiver
3246  // -----------------------------------
3247  Label slow, miss_force_generic;
3248
3249  Register key = a0;
3250  Register receiver = a1;
3251
3252  __ JumpIfNotSmi(key, &miss_force_generic);
3253  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
3254  __ sra(a2, a0, kSmiTagSize);
3255  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
3256  __ Ret();
3257
3258  // Slow case, key and receiver still in a0 and a1.
3259  __ bind(&slow);
3260  __ IncrementCounter(
3261      masm->isolate()->counters()->keyed_load_external_array_slow(),
3262      1, a2, a3);
3263  // Entry registers are intact.
3264  // ---------- S t a t e --------------
3265  //  -- ra     : return address
3266  //  -- a0     : key
3267  //  -- a1     : receiver
3268  // -----------------------------------
3269  Handle<Code> slow_ic =
3270      masm->isolate()->builtins()->KeyedLoadIC_Slow();
3271  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
3272
3273  // Miss case, call the runtime.
3274  __ bind(&miss_force_generic);
3275
3276  // ---------- S t a t e --------------
3277  //  -- ra     : return address
3278  //  -- a0     : key
3279  //  -- a1     : receiver
3280  // -----------------------------------
3281
3282  Handle<Code> miss_ic =
3283     masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3284  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
3285}
3286
3287
3288static bool IsElementTypeSigned(ElementsKind elements_kind) {
3289  switch (elements_kind) {
3290    case EXTERNAL_BYTE_ELEMENTS:
3291    case EXTERNAL_SHORT_ELEMENTS:
3292    case EXTERNAL_INT_ELEMENTS:
3293      return true;
3294
3295    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3296    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3297    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3298    case EXTERNAL_PIXEL_ELEMENTS:
3299      return false;
3300
3301    case EXTERNAL_FLOAT_ELEMENTS:
3302    case EXTERNAL_DOUBLE_ELEMENTS:
3303    case FAST_SMI_ONLY_ELEMENTS:
3304    case FAST_ELEMENTS:
3305    case FAST_DOUBLE_ELEMENTS:
3306    case DICTIONARY_ELEMENTS:
3307    case NON_STRICT_ARGUMENTS_ELEMENTS:
3308      UNREACHABLE();
3309      return false;
3310  }
3311  return false;
3312}
3313
3314
3315void KeyedLoadStubCompiler::GenerateLoadExternalArray(
3316    MacroAssembler* masm,
3317    ElementsKind elements_kind) {
3318  // ---------- S t a t e --------------
3319  //  -- ra     : return address
3320  //  -- a0     : key
3321  //  -- a1     : receiver
3322  // -----------------------------------
3323  Label miss_force_generic, slow, failed_allocation;
3324
3325  Register key = a0;
3326  Register receiver = a1;
3327
3328  // This stub is meant to be tail-jumped to; the receiver must already
3329  // have been verified by the caller not to be a smi.
3330
3331  // Check that the key is a smi.
3332  __ JumpIfNotSmi(key, &miss_force_generic);
3333
3334  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3335  // a3: elements array
3336
3337  // Check that the index is in range.
3338  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3339  __ sra(t2, key, kSmiTagSize);
3340  // Unsigned comparison catches both negative and too-large values.
3341  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3342
3343  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3344  // a3: base pointer of external storage
3345
3346  // We do not untag the smi key; instead we work with it
3347  // as if it were premultiplied by 2.
3348  STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
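  // Because the key is index << 1, element addresses are computed as
  //   base + (key >> 1)  for 1-byte elements,
  //   base + key         for 2-byte elements,
  //   base + (key << 1)  for 4-byte elements,
  //   base + (key << 2)  for 8-byte elements.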
3349
3350  Register value = a2;
3351  switch (elements_kind) {
3352    case EXTERNAL_BYTE_ELEMENTS:
3353      __ srl(t2, key, 1);
3354      __ addu(t3, a3, t2);
3355      __ lb(value, MemOperand(t3, 0));
3356      break;
3357    case EXTERNAL_PIXEL_ELEMENTS:
3358    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3359      __ srl(t2, key, 1);
3360      __ addu(t3, a3, t2);
3361      __ lbu(value, MemOperand(t3, 0));
3362      break;
3363    case EXTERNAL_SHORT_ELEMENTS:
3364      __ addu(t3, a3, key);
3365      __ lh(value, MemOperand(t3, 0));
3366      break;
3367    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3368      __ addu(t3, a3, key);
3369      __ lhu(value, MemOperand(t3, 0));
3370      break;
3371    case EXTERNAL_INT_ELEMENTS:
3372    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3373      __ sll(t2, key, 1);
3374      __ addu(t3, a3, t2);
3375      __ lw(value, MemOperand(t3, 0));
3376      break;
3377    case EXTERNAL_FLOAT_ELEMENTS:
3378      __ sll(t3, t2, 2);
3379      __ addu(t3, a3, t3);
3380      if (CpuFeatures::IsSupported(FPU)) {
3381        CpuFeatures::Scope scope(FPU);
3382        __ lwc1(f0, MemOperand(t3, 0));
3383      } else {
3384        __ lw(value, MemOperand(t3, 0));
3385      }
3386      break;
3387    case EXTERNAL_DOUBLE_ELEMENTS:
3388      __ sll(t2, key, 2);
3389      __ addu(t3, a3, t2);
3390      if (CpuFeatures::IsSupported(FPU)) {
3391        CpuFeatures::Scope scope(FPU);
3392        __ ldc1(f0, MemOperand(t3, 0));
3393      } else {
3394        // t3: pointer to the beginning of the double we want to load.
3395        __ lw(a2, MemOperand(t3, 0));
3396        __ lw(a3, MemOperand(t3, Register::kSizeInBytes));
3397      }
3398      break;
3399    case FAST_ELEMENTS:
3400    case FAST_SMI_ONLY_ELEMENTS:
3401    case FAST_DOUBLE_ELEMENTS:
3402    case DICTIONARY_ELEMENTS:
3403    case NON_STRICT_ARGUMENTS_ELEMENTS:
3404      UNREACHABLE();
3405      break;
3406  }
3407
3408  // For integer array types:
3409  // a2: value
3410  // For float array type:
3411  // f0: value (if FPU is supported)
3412  // a2: value (if FPU is not supported)
3413  // For double array type:
3414  // f0: value (if FPU is supported)
3415  // a2/a3: value (if FPU is not supported)
3416
3417  if (elements_kind == EXTERNAL_INT_ELEMENTS) {
3418    // For the Int and UnsignedInt array types, we need to see whether
3419    // the value can be represented in a Smi. If not, we need to convert
3420    // it to a HeapNumber.
3421    Label box_int;
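    // A 32-bit value fits in a smi iff it lies in [-2^30, 2^30 - 1].
    // Subtracting 0xC0000000 is the same as adding 2^30, which wraps exactly
    // the out-of-range values into the negative half; e.g. for value
    // 0x40000000 (2^30) the result is 0x80000000, i.e. negative.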
3422    __ Subu(t3, value, Operand(0xC0000000));  // Non-smi value gives neg result.
3423    __ Branch(&box_int, lt, t3, Operand(zero_reg));
3424    // Tag integer as smi and return it.
3425    __ sll(v0, value, kSmiTagSize);
3426    __ Ret();
3427
3428    __ bind(&box_int);
3429    // Allocate a HeapNumber for the result and perform int-to-double
3430    // conversion.
3431    // The arm version uses a temporary here to save r0, but we don't need to
3432    // (a0 is not modified).
3433    __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex);
3434    __ AllocateHeapNumber(v0, a3, t0, t1, &slow);
3435
3436    if (CpuFeatures::IsSupported(FPU)) {
3437      CpuFeatures::Scope scope(FPU);
3438      __ mtc1(value, f0);
3439      __ cvt_d_w(f0, f0);
3440      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3441      __ Ret();
3442    } else {
3443      Register dst1 = t2;
3444      Register dst2 = t3;
3445      FloatingPointHelper::Destination dest =
3446          FloatingPointHelper::kCoreRegisters;
3447      FloatingPointHelper::ConvertIntToDouble(masm,
3448                                              value,
3449                                              dest,
3450                                              f0,
3451                                              dst1,
3452                                              dst2,
3453                                              t1,
3454                                              f2);
3455      __ sw(dst1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3456      __ sw(dst2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3457      __ Ret();
3458    }
3459  } else if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
3460    // The test is different for unsigned int values. Since we need
3461    // the value to be in the range of a positive smi, we can't
3462    // handle either of the top two bits being set in the value.
3463    if (CpuFeatures::IsSupported(FPU)) {
3464      CpuFeatures::Scope scope(FPU);
3465      Label pl_box_int;
3466      __ And(t2, value, Operand(0xC0000000));
3467      __ Branch(&pl_box_int, ne, t2, Operand(zero_reg));
3468
3469      // It can fit in a smi.
3470      // Tag integer as smi and return it.
3471      __ sll(v0, value, kSmiTagSize);
3472      __ Ret();
3473
3474      __ bind(&pl_box_int);
3475      // Allocate a HeapNumber for the result and perform int-to-double
3476      // conversion. Don't use a0 and a1 as AllocateHeapNumber clobbers all
3477      // registers - also when jumping due to exhausted young space.
3478      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3479      __ AllocateHeapNumber(v0, t2, t3, t6, &slow);
3480
3481      // The following sequence is replaced by the Cvt_d_uw macro:
3482      // __ mtc1(value, f0);     // LS 32-bits.
3483      // __ mtc1(zero_reg, f1);  // MS 32-bits are all zero.
3484      // __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
3485
3486      __ Cvt_d_uw(f0, value, f22);
3487
3488      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3489
3490      __ Ret();
3491    } else {
3492      // Check whether the unsigned integer fits into a smi.
3493      Label box_int_0, box_int_1, done;
3494      __ And(t2, value, Operand(0x80000000));
3495      __ Branch(&box_int_0, ne, t2, Operand(zero_reg));
3496      __ And(t2, value, Operand(0x40000000));
3497      __ Branch(&box_int_1, ne, t2, Operand(zero_reg));
3498
3499      // Tag integer as smi and return it.
3500      __ sll(v0, value, kSmiTagSize);
3501      __ Ret();
3502
3503      Register hiword = value;  // a2.
3504      Register loword = a3;
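      // GenerateUInt2Double (see its definition earlier in this file) builds
      // the IEEE-754 double for a value whose leading-zero count is known
      // statically, leaving the high and low words in hiword and loword.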
3505
3506      __ bind(&box_int_0);
3507      // Integer does not have leading zeros.
3508      GenerateUInt2Double(masm, hiword, loword, t0, 0);
3509      __ Branch(&done);
3510
3511      __ bind(&box_int_1);
3512      // Integer has one leading zero.
3513      GenerateUInt2Double(masm, hiword, loword, t0, 1);
3514
3516      __ bind(&done);
3517      // Integer was converted to double in registers hiword:loword.
3518      // Wrap it into a HeapNumber. Don't use a0 and a1 as AllocateHeapNumber
3519      // clobbers all registers - also when jumping due to exhausted young
3520      // space.
3521      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3522      __ AllocateHeapNumber(t2, t3, t5, t6, &slow);
3523
3524      __ sw(hiword, FieldMemOperand(t2, HeapNumber::kExponentOffset));
3525      __ sw(loword, FieldMemOperand(t2, HeapNumber::kMantissaOffset));
3526
3527      __ mov(v0, t2);
3528      __ Ret();
3529    }
3530  } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3531    // For the floating-point array type, we need to always allocate a
3532    // HeapNumber.
3533    if (CpuFeatures::IsSupported(FPU)) {
3534      CpuFeatures::Scope scope(FPU);
3535      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3536      // AllocateHeapNumber clobbers all registers - also when jumping due to
3537      // exhausted young space.
3538      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3539      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3540      // The single-precision value is already in FPU register f0.
3541      __ cvt_d_s(f0, f0);
3542      __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
3543      __ Ret();
3544    } else {
3545      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3546      // AllocateHeapNumber clobbers all registers - also when jumping due to
3547      // exhausted young space.
3548      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3549      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3550      // FPU is not available; do a manual single-to-double conversion.
3551
3552      // a2: floating point value (binary32).
3553      // v0: heap number for result
3554
3555      // Extract mantissa to t4.
3556      __ And(t4, value, Operand(kBinary32MantissaMask));
3557
3558      // Extract exponent to t5.
3559      __ srl(t5, value, kBinary32MantissaBits);
3560      __ And(t5, t5, Operand(kBinary32ExponentMask >> kBinary32MantissaBits));
3561
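      // Convert the binary32 exponent to binary64: exponent 0 (zero or
      // subnormal) stays 0, exponent 0xff (NaN or infinity) becomes 0x7ff,
      // and everything else is rebiased from 127 to 1023.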
3562      Label exponent_rebiased;
3563      __ Branch(&exponent_rebiased, eq, t5, Operand(zero_reg));
3564
3565      __ li(t0, 0x7ff);
3566      __ Xor(t1, t5, Operand(0xFF));
3567      __ movz(t5, t0, t1);  // Set t5 to 0x7ff only if t5 is equal to 0xff.
3568      __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));  // Was 0xff.
3569
3570      // Rebias exponent.
3571      __ Addu(t5,
3572              t5,
3573              Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias));
3574
3575      __ bind(&exponent_rebiased);
3576      __ And(a2, value, Operand(kBinary32SignMask));
3577      value = no_reg;
3578      __ sll(t0, t5, HeapNumber::kMantissaBitsInTopWord);
3579      __ or_(a2, a2, t0);
3580
3581      // Shift mantissa.
3582      static const int kMantissaShiftForHiWord =
3583          kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3584
3585      static const int kMantissaShiftForLoWord =
3586          kBitsPerInt - kMantissaShiftForHiWord;
3587
3588      __ srl(t0, t4, kMantissaShiftForHiWord);
3589      __ or_(a2, a2, t0);
3590      __ sll(a0, t4, kMantissaShiftForLoWord);
3591
3592      __ sw(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3593      __ sw(a0, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3594      __ Ret();
3595    }
3596
3597  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3598    if (CpuFeatures::IsSupported(FPU)) {
3599      CpuFeatures::Scope scope(FPU);
3600      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3601      // AllocateHeapNumber clobbers all registers - also when jumping due to
3602      // exhausted young space.
3603      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3604      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3605      // The double value is already in f0.
3606      __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
3607      __ Ret();
3608    } else {
3609      // Allocate a HeapNumber for the result. Don't use a0 and a1 as
3610      // AllocateHeapNumber clobbers all registers - also when jumping due to
3611      // exhausted young space.
3612      __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3613      __ AllocateHeapNumber(v0, t3, t5, t6, &slow);
3614
3615      __ sw(a2, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3616      __ sw(a3, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3617      __ Ret();
3618    }
3619
3620  } else {
3621    // Tag integer as smi and return it.
3622    __ sll(v0, value, kSmiTagSize);
3623    __ Ret();
3624  }
3625
3626  // Slow case, key and receiver still in a0 and a1.
3627  __ bind(&slow);
3628  __ IncrementCounter(
3629      masm->isolate()->counters()->keyed_load_external_array_slow(),
3630      1, a2, a3);
3631
3632  // ---------- S t a t e --------------
3633  //  -- ra     : return address
3634  //  -- a0     : key
3635  //  -- a1     : receiver
3636  // -----------------------------------
3637
3638  __ Push(a1, a0);
3639
3640  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3641
3642  __ bind(&miss_force_generic);
3643  Handle<Code> stub =
3644      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3645  __ Jump(stub, RelocInfo::CODE_TARGET);
3646}
3647
3648
3649void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3650    MacroAssembler* masm,
3651    ElementsKind elements_kind) {
3652  // ---------- S t a t e --------------
3653  //  -- a0     : value
3654  //  -- a1     : key
3655  //  -- a2     : receiver
3656  //  -- ra     : return address
3657  // -----------------------------------
3658
3659  Label slow, check_heap_number, miss_force_generic;
3660
3661  // Register usage.
3662  Register value = a0;
3663  Register key = a1;
3664  Register receiver = a2;
3665  // a3 holds the elements array and, later, the external storage base pointer.
3666
3667  // This stub is meant to be tail-jumped to; the receiver must already
3668  // have been verified by the caller not to be a smi.
3669
3670  // Check that the key is a smi.
3671  __ JumpIfNotSmi(key, &miss_force_generic);
3672
3673  __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
3674
3675  // Check that the index is in range.
3676  __ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
3677  // Unsigned comparison catches both negative and too-large values.
3678  __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
3679
3680  // Handle both smis and HeapNumbers in the fast path. Go to the
3681  // runtime for all other kinds of values.
3682  // a3: external array.
3683
3684  if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
3685    // Double to pixel conversion is only implemented in the runtime for now.
3686    __ JumpIfNotSmi(value, &slow);
3687  } else {
3688    __ JumpIfNotSmi(value, &check_heap_number);
3689  }
3690  __ SmiUntag(t1, value);
3691  __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3692
3693  // a3: base pointer of external storage.
3694  // t1: value (integer).
3695
3696  switch (elements_kind) {
3697    case EXTERNAL_PIXEL_ELEMENTS: {
3698      // Clamp the value to [0..255].
3699      // v0 is used as a scratch register here.
3700      Label done;
3701      __ li(v0, Operand(255));
3702      // Normal branch: nop in delay slot.
3703      __ Branch(&done, gt, t1, Operand(v0));
3704      // Use delay slot in this branch.
3705      __ Branch(USE_DELAY_SLOT, &done, lt, t1, Operand(zero_reg));
3706      __ mov(v0, zero_reg);  // In delay slot.
3707      __ mov(v0, t1);  // Value is in range 0..255.
3708      __ bind(&done);
3709      __ mov(t1, v0);
3710
3711      __ srl(t8, key, 1);
3712      __ addu(t8, a3, t8);
3713      __ sb(t1, MemOperand(t8, 0));
3714      }
3715      break;
3716    case EXTERNAL_BYTE_ELEMENTS:
3717    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3718      __ srl(t8, key, 1);
3719      __ addu(t8, a3, t8);
3720      __ sb(t1, MemOperand(t8, 0));
3721      break;
3722    case EXTERNAL_SHORT_ELEMENTS:
3723    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3724      __ addu(t8, a3, key);
3725      __ sh(t1, MemOperand(t8, 0));
3726      break;
3727    case EXTERNAL_INT_ELEMENTS:
3728    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3729      __ sll(t8, key, 1);
3730      __ addu(t8, a3, t8);
3731      __ sw(t1, MemOperand(t8, 0));
3732      break;
3733    case EXTERNAL_FLOAT_ELEMENTS:
3734      // Perform int-to-float conversion and store to memory.
3735      __ SmiUntag(t0, key);
3736      StoreIntAsFloat(masm, a3, t0, t1, t2, t3, t4);
3737      break;
3738    case EXTERNAL_DOUBLE_ELEMENTS:
3739      __ sll(t8, key, 2);
3740      __ addu(a3, a3, t8);
3741      // a3: effective address of the double element
3742      FloatingPointHelper::Destination destination;
3743      if (CpuFeatures::IsSupported(FPU)) {
3744        destination = FloatingPointHelper::kFPURegisters;
3745      } else {
3746        destination = FloatingPointHelper::kCoreRegisters;
3747      }
3748      FloatingPointHelper::ConvertIntToDouble(
3749          masm, t1, destination,
3750          f0, t2, t3,  // These are: double_dst, dst1, dst2.
3751          t0, f2);  // These are: scratch2, single_scratch.
3752      if (destination == FloatingPointHelper::kFPURegisters) {
3753        CpuFeatures::Scope scope(FPU);
3754        __ sdc1(f0, MemOperand(a3, 0));
3755      } else {
3756        __ sw(t2, MemOperand(a3, 0));
3757        __ sw(t3, MemOperand(a3, Register::kSizeInBytes));
3758      }
3759      break;
3760    case FAST_ELEMENTS:
3761    case FAST_SMI_ONLY_ELEMENTS:
3762    case FAST_DOUBLE_ELEMENTS:
3763    case DICTIONARY_ELEMENTS:
3764    case NON_STRICT_ARGUMENTS_ELEMENTS:
3765      UNREACHABLE();
3766      break;
3767  }
3768
3769  // Entry registers are intact, a0 holds the value which is the return value.
3770  __ mov(v0, a0);
3771  __ Ret();
3772
3773  if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) {
3774    // a3: external array.
3775    __ bind(&check_heap_number);
3776    __ GetObjectType(value, t1, t2);
3777    __ Branch(&slow, ne, t2, Operand(HEAP_NUMBER_TYPE));
3778
3779    __ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
3780
3781    // a3: base pointer of external storage.
3782
3783    // The WebGL specification leaves the behavior of storing NaN and
3784    // +/-Infinity into integer arrays basically undefined. For more
3785    // reproducible behavior, convert these to zero.
3786
3787    if (CpuFeatures::IsSupported(FPU)) {
3788      CpuFeatures::Scope scope(FPU);
3789
3790      __ ldc1(f0, FieldMemOperand(a0, HeapNumber::kValueOffset));
3791
3792      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3793        __ cvt_s_d(f0, f0);
3794        __ sll(t8, key, 1);
3795        __ addu(t8, a3, t8);
3796        __ swc1(f0, MemOperand(t8, 0));
3797      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3798        __ sll(t8, key, 2);
3799        __ addu(t8, a3, t8);
3800        __ sdc1(f0, MemOperand(t8, 0));
3801      } else {
3802        __ EmitECMATruncate(t3, f0, f2, t2, t1, t5);
3803
3804        switch (elements_kind) {
3805          case EXTERNAL_BYTE_ELEMENTS:
3806          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3807            __ srl(t8, key, 1);
3808            __ addu(t8, a3, t8);
3809            __ sb(t3, MemOperand(t8, 0));
3810            break;
3811          case EXTERNAL_SHORT_ELEMENTS:
3812          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3813            __ addu(t8, a3, key);
3814            __ sh(t3, MemOperand(t8, 0));
3815            break;
3816          case EXTERNAL_INT_ELEMENTS:
3817          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3818            __ sll(t8, key, 1);
3819            __ addu(t8, a3, t8);
3820            __ sw(t3, MemOperand(t8, 0));
3821            break;
3822          case EXTERNAL_PIXEL_ELEMENTS:
3823          case EXTERNAL_FLOAT_ELEMENTS:
3824          case EXTERNAL_DOUBLE_ELEMENTS:
3825          case FAST_ELEMENTS:
3826          case FAST_SMI_ONLY_ELEMENTS:
3827          case FAST_DOUBLE_ELEMENTS:
3828          case DICTIONARY_ELEMENTS:
3829          case NON_STRICT_ARGUMENTS_ELEMENTS:
3830            UNREACHABLE();
3831            break;
3832        }
3833      }
3834
3835      // Entry registers are intact, a0 holds the value
3836      // which is the return value.
3837      __ mov(v0, a0);
3838      __ Ret();
3839    } else {
3840      // FPU is not available; do manual conversions.
3841
3842      __ lw(t3, FieldMemOperand(value, HeapNumber::kExponentOffset));
3843      __ lw(t4, FieldMemOperand(value, HeapNumber::kMantissaOffset));
3844
3845      if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3846        Label done, nan_or_infinity_or_zero;
3847        static const int kMantissaInHiWordShift =
3848            kBinary32MantissaBits - HeapNumber::kMantissaBitsInTopWord;
3849
3850        static const int kMantissaInLoWordShift =
3851            kBitsPerInt - kMantissaInHiWordShift;
3852
3853        // Test for all special exponent values: zeros, subnormal numbers, NaNs
3854        // and infinities. All these should be converted to 0.
3855        __ li(t5, HeapNumber::kExponentMask);
3856        __ and_(t6, t3, t5);
3857        __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(zero_reg));
3858
3859        __ xor_(t1, t6, t5);
3860        __ li(t2, kBinary32ExponentMask);
3861        __ movz(t6, t2, t1);  // Only if t6 is equal to t5.
3862        __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));
3863
3864        // Rebias exponent.
3865        __ srl(t6, t6, HeapNumber::kExponentShift);
3866        __ Addu(t6,
3867                t6,
3868                Operand(kBinary32ExponentBias - HeapNumber::kExponentBias));
3869
3870        __ li(t1, Operand(kBinary32MaxExponent));
3871        __ Slt(t1, t1, t6);
3872        __ And(t2, t3, Operand(HeapNumber::kSignMask));
3873        __ Or(t2, t2, Operand(kBinary32ExponentMask));
3874        __ movn(t3, t2, t1);  // Only if t6 is gt kBinary32MaxExponent.
3875        __ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
3876
3877        __ Slt(t1, t6, Operand(kBinary32MinExponent));
3878        __ And(t2, t3, Operand(HeapNumber::kSignMask));
3879        __ movn(t3, t2, t1);  // Only if t6 is lt kBinary32MinExponent.
3880        __ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
3881
3882        __ And(t7, t3, Operand(HeapNumber::kSignMask));
3883        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3884        __ sll(t3, t3, kMantissaInHiWordShift);
3885        __ or_(t7, t7, t3);
3886        __ srl(t4, t4, kMantissaInLoWordShift);
3887        __ or_(t7, t7, t4);
3888        __ sll(t6, t6, kBinary32ExponentShift);
3889        __ or_(t3, t7, t6);
3890
3891        __ bind(&done);
3892        __ sll(t9, key, 1);
3893        __ addu(t9, a3, t9);  // a3: base pointer of external storage.
3894        __ sw(t3, MemOperand(t9, 0));
3895
3896        // Entry registers are intact, a0 holds the value which is the return
3897        // value.
3898        __ mov(v0, a0);
3899        __ Ret();
3900
3901        __ bind(&nan_or_infinity_or_zero);
3902        __ And(t7, t3, Operand(HeapNumber::kSignMask));
3903        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3904        __ or_(t6, t6, t7);
3905        __ sll(t3, t3, kMantissaInHiWordShift);
3906        __ or_(t6, t6, t3);
3907        __ srl(t4, t4, kMantissaInLoWordShift);
3908        __ or_(t3, t6, t4);
3909        __ Branch(&done);
3910      } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3911        __ sll(t8, key, 2);  // Smi key is index * 2; << 2 gives index * 8.
3912        __ addu(t8, a3, t8);
3913        // t8: effective address of destination element.
3914        __ sw(t4, MemOperand(t8, 0));
3915        __ sw(t3, MemOperand(t8, Register::kSizeInBytes));
3916        __ mov(v0, a0);
3917        __ Ret();
3918      } else {
3919        bool is_signed_type = IsElementTypeSigned(elements_kind);
3920        int meaningfull_bits = is_signed_type ? (kBitsPerInt - 1) : kBitsPerInt;
3921        int32_t min_value    = is_signed_type ? 0x80000000 : 0x00000000;
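        // Manual double -> int truncation: extract the exponent; map zeros,
        // subnormals, NaNs and infinities to 0; saturate exponents that are
        // too large to min_value; otherwise shift the mantissa into place and
        // apply the sign. The result ends up in t3.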
3922
3923        Label done, sign;
3924
3925        // Test for all special exponent values: zeros, subnormal numbers, NaNs
3926        // and infinities. All these should be converted to 0.
3927        __ li(t5, HeapNumber::kExponentMask);
3928        __ and_(t6, t3, t5);
3929        __ movz(t3, zero_reg, t6);  // Only if t6 is equal to zero.
3930        __ Branch(&done, eq, t6, Operand(zero_reg));
3931
3932        __ xor_(t2, t6, t5);
3933        __ movz(t3, zero_reg, t2);  // Only if t6 is equal to t5.
3934        __ Branch(&done, eq, t6, Operand(t5));
3935
3936        // Unbias exponent.
3937        __ srl(t6, t6, HeapNumber::kExponentShift);
3938        __ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
3939        // If exponent is negative then result is 0.
3940        __ slt(t2, t6, zero_reg);
3941        __ movn(t3, zero_reg, t2);  // Only if exponent is negative.
3942        __ Branch(&done, lt, t6, Operand(zero_reg));
3943
3944        // If exponent is too big then result is minimal value.
3945        __ slti(t1, t6, meaningfull_bits - 1);
3946        __ li(t2, min_value);
3947        __ movz(t3, t2, t1);  // Only if t6 is ge meaningfull_bits - 1.
3948        __ Branch(&done, ge, t6, Operand(meaningfull_bits - 1));
3949
3950        __ And(t5, t3, Operand(HeapNumber::kSignMask));
3951        __ And(t3, t3, Operand(HeapNumber::kMantissaMask));
3952        __ Or(t3, t3, Operand(1u << HeapNumber::kMantissaBitsInTopWord));
3953
3954        __ li(t9, HeapNumber::kMantissaBitsInTopWord);
3955        __ subu(t6, t9, t6);
3956        __ slt(t1, t6, zero_reg);
3957        __ srlv(t2, t3, t6);
3958        __ movz(t3, t2, t1);  // Only if t6 is positive.
3959        __ Branch(&sign, ge, t6, Operand(zero_reg));
3960
3961        __ subu(t6, zero_reg, t6);
3962        __ sllv(t3, t3, t6);
3963        __ li(t9, meaningfull_bits);
3964        __ subu(t6, t9, t6);
3965        __ srlv(t4, t4, t6);
3966        __ or_(t3, t3, t4);
3967
3968        __ bind(&sign);
3969        __ subu(t2, zero_reg, t3);
3970        __ movn(t3, t2, t5);  // Negate only if the sign bit (t5) is set.
3971
3972        __ bind(&done);
3973
3974        // Result is in t3.
3975        // This switch block should be exactly the same as above (FPU mode).
3976        switch (elements_kind) {
3977          case EXTERNAL_BYTE_ELEMENTS:
3978          case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3979            __ srl(t8, key, 1);
3980            __ addu(t8, a3, t8);
3981            __ sb(t3, MemOperand(t8, 0));
3982            break;
3983          case EXTERNAL_SHORT_ELEMENTS:
3984          case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
3985            __ addu(t8, a3, key);
3986            __ sh(t3, MemOperand(t8, 0));
3987            break;
3988          case EXTERNAL_INT_ELEMENTS:
3989          case EXTERNAL_UNSIGNED_INT_ELEMENTS:
3990            __ sll(t8, key, 1);
3991            __ addu(t8, a3, t8);
3992            __ sw(t3, MemOperand(t8, 0));
3993            break;
3994          case EXTERNAL_PIXEL_ELEMENTS:
3995          case EXTERNAL_FLOAT_ELEMENTS:
3996          case EXTERNAL_DOUBLE_ELEMENTS:
3997          case FAST_ELEMENTS:
3998          case FAST_SMI_ONLY_ELEMENTS:
3999          case FAST_DOUBLE_ELEMENTS:
4000          case DICTIONARY_ELEMENTS:
4001          case NON_STRICT_ARGUMENTS_ELEMENTS:
4002            UNREACHABLE();
4003            break;
4004        }
4005      }
4006    }
4007  }
4008
4009  // Slow case: value, key and receiver are still in a0, a1 and a2.
4010  __ bind(&slow);
4011  __ IncrementCounter(
4012      masm->isolate()->counters()->keyed_load_external_array_slow(),
4013      1, a2, a3);
4014  // Entry registers are intact.
4015  // ---------- S t a t e --------------
4016  //  -- ra     : return address
4017  //  -- a0     : value
4018  //  -- a1     : key
  //  -- a2     : receiver
4019  // -----------------------------------
4020  Handle<Code> slow_ic =
4021      masm->isolate()->builtins()->KeyedStoreIC_Slow();
4022  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4023
4024  // Miss case, call the runtime.
4025  __ bind(&miss_force_generic);
4026
4027  // ---------- S t a t e --------------
4028  //  -- ra     : return address
4029  //  -- a0     : value
4030  //  -- a1     : key
  //  -- a2     : receiver
4031  // -----------------------------------
4032
4033  Handle<Code> miss_ic =
4034     masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4035  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4036}
4037
4038
4039void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
4040  // ----------- S t a t e -------------
4041  //  -- ra    : return address
4042  //  -- a0    : key
4043  //  -- a1    : receiver
4044  // -----------------------------------
4045  Label miss_force_generic;
4046
4047  // This stub is meant to be tail-jumped to; the receiver must already
4048  // have been verified by the caller not to be a smi.
4049
4050  // Check that the key is a smi.
4051  __ JumpIfNotSmi(a0, &miss_force_generic);
4052
4053  // Get the elements array.
4054  __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
4055  __ AssertFastElements(a2);
4056
4057  // Check that the key is within bounds.
4058  __ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
4059  __ Branch(&miss_force_generic, hs, a0, Operand(a3));
4060
4061  // Load the result and make sure it's not the hole.
4062  __ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4063  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4064  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
4065  __ Addu(t0, t0, a3);
4066  __ lw(t0, MemOperand(t0));
4067  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
4068  __ Branch(&miss_force_generic, eq, t0, Operand(t1));
4069  __ mov(v0, t0);
4070  __ Ret();
4071
4072  __ bind(&miss_force_generic);
4073  Handle<Code> stub =
4074      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4075  __ Jump(stub, RelocInfo::CODE_TARGET);
4076}
4077
4078
4079void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
4080    MacroAssembler* masm) {
4081  // ----------- S t a t e -------------
4082  //  -- ra    : return address
4083  //  -- a0    : key
4084  //  -- a1    : receiver
4085  // -----------------------------------
4086  Label miss_force_generic, slow_allocate_heapnumber;
4087
4088  Register key_reg = a0;
4089  Register receiver_reg = a1;
4090  Register elements_reg = a2;
4091  Register heap_number_reg = a2;
4092  Register indexed_double_offset = a3;
4093  Register scratch = t0;
4094  Register scratch2 = t1;
4095  Register scratch3 = t2;
4096  Register heap_number_map = t3;
4097
4098  // This stub is meant to be tail-jumped to; the receiver must already
4099  // have been verified by the caller not to be a smi.
4100
4101  // Check that the key is a smi.
4102  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4103
4104  // Get the elements array.
4105  __ lw(elements_reg,
4106        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4107
4108  // Check that the key is within bounds.
4109  __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4110  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4111
4112  // Load the upper word of the double in the fixed array and test for NaN.
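  // The hole is stored as a NaN with a fixed bit pattern, so comparing just
  // the upper 32 bits against kHoleNanUpper32 is enough to detect it.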
4113  __ sll(scratch2, key_reg, kDoubleSizeLog2 - kSmiTagSize);
4114  __ Addu(indexed_double_offset, elements_reg, Operand(scratch2));
4115  uint32_t upper_32_offset = FixedArray::kHeaderSize + sizeof(kHoleNanLower32);
4116  __ lw(scratch, FieldMemOperand(indexed_double_offset, upper_32_offset));
4117  __ Branch(&miss_force_generic, eq, scratch, Operand(kHoleNanUpper32));
4118
4119  // Non-NaN. Allocate a new heap number and copy the double value into it.
4120  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
4121  __ AllocateHeapNumber(heap_number_reg, scratch2, scratch3,
4122                        heap_number_map, &slow_allocate_heapnumber);
4123
4124  // No need to reload the upper 32 bits of the double; they are already in
4125  // scratch.
4126  __ sw(scratch, FieldMemOperand(heap_number_reg,
4127                                 HeapNumber::kExponentOffset));
4128  __ lw(scratch, FieldMemOperand(indexed_double_offset,
4129                                 FixedArray::kHeaderSize));
4130  __ sw(scratch, FieldMemOperand(heap_number_reg,
4131                                 HeapNumber::kMantissaOffset));
4132
4133  __ mov(v0, heap_number_reg);
4134  __ Ret();
4135
4136  __ bind(&slow_allocate_heapnumber);
4137  Handle<Code> slow_ic =
4138      masm->isolate()->builtins()->KeyedLoadIC_Slow();
4139  __ Jump(slow_ic, RelocInfo::CODE_TARGET);
4140
4141  __ bind(&miss_force_generic);
4142  Handle<Code> miss_ic =
4143      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
4144  __ Jump(miss_ic, RelocInfo::CODE_TARGET);
4145}
4146
4147
4148void KeyedStoreStubCompiler::GenerateStoreFastElement(
4149    MacroAssembler* masm,
4150    bool is_js_array,
4151    ElementsKind elements_kind) {
4152  // ----------- S t a t e -------------
4153  //  -- a0    : value
4154  //  -- a1    : key
4155  //  -- a2    : receiver
4156  //  -- ra    : return address
4157  //  -- a3    : scratch
4158  //  -- t0    : scratch (elements)
4159  // -----------------------------------
4160  Label miss_force_generic, transition_elements_kind;
4161
4162  Register value_reg = a0;
4163  Register key_reg = a1;
4164  Register receiver_reg = a2;
4165  Register scratch = a3;
4166  Register elements_reg = t0;
4167  Register scratch2 = t1;
4168  Register scratch3 = t2;
4169
4170  // This stub is meant to be tail-jumped to; the receiver must already
4171  // have been verified by the caller not to be a smi.
4172
4173  // Check that the key is a smi.
4174  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4175
4176  // Get the elements array and make sure it is a fast element array, not 'cow'.
4177  __ lw(elements_reg,
4178        FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4179  __ CheckMap(elements_reg,
4180              scratch,
4181              Heap::kFixedArrayMapRootIndex,
4182              &miss_force_generic,
4183              DONT_DO_SMI_CHECK);
4184
4185  // Check that the key is within bounds.
4186  if (is_js_array) {
4187    __ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4188  } else {
4189    __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4190  }
4191  // Compare smis.
4192  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
4193
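  // Smi-only arrays never contain heap pointers, so no write barrier is
  // needed; the FAST_ELEMENTS path below may store a heap object and
  // therefore records the write.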
4194  if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
4195    __ JumpIfNotSmi(value_reg, &transition_elements_kind);
4196    __ Addu(scratch,
4197            elements_reg,
4198            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4199    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4200    __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4201    __ Addu(scratch, scratch, scratch2);
4202    __ sw(value_reg, MemOperand(scratch));
4203  } else {
4204    ASSERT(elements_kind == FAST_ELEMENTS);
4205    __ Addu(scratch,
4206            elements_reg,
4207            Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4208    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
4209    __ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
4210    __ Addu(scratch, scratch, scratch2);
4211    __ sw(value_reg, MemOperand(scratch));
4212    __ mov(receiver_reg, value_reg);
4214    __ RecordWrite(elements_reg,  // Object.
4215                   scratch,       // Address.
4216                   receiver_reg,  // Value.
4217                   kRAHasNotBeenSaved,
4218                   kDontSaveFPRegs);
4219  }
4220  // value_reg (a0) is preserved.
4221  // Done.
4222  __ Ret();
4223
4224  __ bind(&miss_force_generic);
4225  Handle<Code> ic =
4226      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4227  __ Jump(ic, RelocInfo::CODE_TARGET);
4228
4229  __ bind(&transition_elements_kind);
4230  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4231  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4232}
4233
4234
4235void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
4236    MacroAssembler* masm,
4237    bool is_js_array) {
4238  // ----------- S t a t e -------------
4239  //  -- a0    : value
4240  //  -- a1    : key
4241  //  -- a2    : receiver
4242  //  -- ra    : return address
4243  //  -- a3    : scratch
4244  //  -- t0    : scratch (elements_reg)
4245  //  -- t1    : scratch (mantissa_reg)
4246  //  -- t2    : scratch (exponent_reg)
4247  //  -- t3    : scratch4
4248  // -----------------------------------
4249  Label miss_force_generic, transition_elements_kind;
4250
4251  Register value_reg = a0;
4252  Register key_reg = a1;
4253  Register receiver_reg = a2;
4254  Register elements_reg = a3;
4255  Register scratch1 = t0;
4256  Register scratch2 = t1;
4257  Register scratch3 = t2;
4258  Register scratch4 = t3;
4259
4260  // This stub is meant to be tail-jumped to; the receiver must already
4261  // have been verified by the caller not to be a smi.
4262  __ JumpIfNotSmi(key_reg, &miss_force_generic);
4263
4264  __ lw(elements_reg,
4265         FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
4266
4267  // Check that the key is within bounds.
4268  if (is_js_array) {
4269    __ lw(scratch1, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
4270  } else {
4271    __ lw(scratch1,
4272          FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
4273  }
4274  // Compare smis; the unsigned compare catches both negative and
4275  // out-of-bounds indexes.
4276  __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
4277
4278  __ StoreNumberToDoubleElements(value_reg,
4279                                 key_reg,
4280                                 receiver_reg,
4281                                 elements_reg,
4282                                 scratch1,
4283                                 scratch2,
4284                                 scratch3,
4285                                 scratch4,
4286                                 &transition_elements_kind);
4287
4288  __ Ret(USE_DELAY_SLOT);
4289  __ mov(v0, value_reg);  // In delay slot.
4290
4291  // Handle store cache miss, replacing the ic with the generic stub.
4292  __ bind(&miss_force_generic);
4293  Handle<Code> ic =
4294      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
4295  __ Jump(ic, RelocInfo::CODE_TARGET);
4296
4297  __ bind(&transition_elements_kind);
4298  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
4299  __ Jump(ic_miss, RelocInfo::CODE_TARGET);
4300}
4301
4302
4303#undef __
4304
4305} }  // namespace v8::internal
4306
4307#endif  // V8_TARGET_ARCH_MIPS
4308