// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register.
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
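      // a0 holds the argument count for the CallFunction builtin; the getter
      // takes no arguments.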
      __ li(a0, Operand(V8_INT64_C(0)));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context and value registers, so we can restore them later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ld(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
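      // The setter receives the stored value as its single argument; a0 holds
      // the argument count for the CallFunction builtin.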
      __ li(a0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, v0);
  }
  __ Ret();
}


void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
                LoadWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
                StoreWithVectorDescriptor::kVector);
  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
                StoreTransitionDescriptor::kVector);
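  // The asserts above ensure that every descriptor orders the slot before the
  // vector, so pushing slot first keeps the stack layout consistent.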
  __ Push(slot, vector);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(slot, vector);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));
}

void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or
  // requires access checks.
  Register map = scratch1;
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ld(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ ld(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsureEmptyPropertyCell(
      global, name, PropertyCellType::kInvalidated);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
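  // LoadWeakValue loads the property cell from the weak cell and jumps to the
  // miss label if the weak cell has been cleared.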
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}

static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);

  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);

  __ CallRuntime(id);
}


// Generate a call to the API function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to the stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // ABI for CallApiCallbackStub.
  Register callee = a0;
  Register data = a4;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
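  // The API holder is either the receiver itself or an object further up the
  // prototype chain; in the latter case, walk holder_depth prototype links.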
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ ld(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ ld(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ ld(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ ld(data,
            FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ ld(data,
            FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ ld(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ ld(data,
            FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ ld(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}

#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}

void PropertyHandlerCompiler::GenerateAccessCheck(
    Handle<WeakCell> native_context_cell, Register scratch1, Register scratch2,
    Label* miss, bool compare_native_contexts_only) {
  Label done;
  // Load current native context.
  __ ld(scratch1, NativeContextMemOperand());
  // Load expected native context.
  __ LoadWeakValue(scratch2, native_context_cell, miss);

  if (!compare_native_contexts_only) {
    __ Branch(&done, eq, scratch1, Operand(scratch2));

    // Compare security tokens of current and expected native contexts.
    __ ld(scratch1, ContextMemOperand(scratch1, Context::SECURITY_TOKEN_INDEX));
    __ ld(scratch2, ContextMemOperand(scratch2, Context::SECURITY_TOKEN_INDEX));
  }
  __ Branch(miss, ne, scratch1, Operand(scratch2));

  __ bind(&done);
}

Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ li(scratch1, Operand(validity_cell));
    __ ld(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    __ Branch(miss, ne, scratch1,
              Operand(Smi::FromInt(Map::kPrototypeChainValid)));
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  Handle<Map> current_map(receiver_map->GetPrototypeChainRootMap(isolate()),
                          isolate());
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      DCHECK(name->IsUniqueName());
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (depth > 1) {
        Handle<WeakCell> weak_cell =
            Map::GetOrCreatePrototypeWeakCell(current, isolate());
        __ LoadWeakValue(reg, weak_cell, miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = handle(JSObject::cast(current_map->prototype()));
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  bool return_holder = return_what == RETURN_HOLDER;
  if (return_holder && depth != 0) {
    Handle<WeakCell> weak_cell =
        Map::GetOrCreatePrototypeWeakCell(current, isolate());
    __ LoadWeakValue(reg, weak_cell, miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and it is needed should the interceptor return without any
  // result. The ACCESSOR case needs the receiver to be passed into C++ code,
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder have been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if the interceptor provided a value for the property. If so,
    // return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name(), receiver(), holder_reg);
  // See NamedLoadHandlerCompiler::InterceptorVectorSlotPop() for details.
  if (holder_reg.is(receiver())) {
    __ Push(slot(), vector());
  } else {
    __ Push(scratch3(), scratch2());  // slot, vector
  }

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver, holder.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());
  __ Push(Smi::FromInt(language_mode));
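  // Arguments on the stack (bottom to top): receiver, holder, callback (or
  // its weak cell), name, value, language mode.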

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ ld(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
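  // The move below is placed in the branch delay slot of the return.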
  __ Ret(USE_DELAY_SLOT);
  __ Move(v0, result);  // Ensure the stub returns the correct value.

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64