handler-compiler-arm.cc revision 109988c7ccb6f3fd1a58574fa3dfb88beaef6632
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ic/handler-compiler.h"

#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


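// Dispatches a load to a JavaScript getter: inside an internal frame the
// receiver (or the global proxy for global objects) is pushed and the accessor
// at |accessor_index| on |holder| is invoked. With a negative accessor index
// this only records the deopt PC offset for the getter stub.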
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r1, holder, accessor_index, ACCESSOR_GETTER);
      __ InvokeFunction(r1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


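// Dispatches a store to a JavaScript setter: the value register is saved,
// receiver and value are pushed and the accessor at |accessor_index| on
// |holder| is invoked; afterwards the passed value (not the setter's result)
// is returned in r0. With a negative accessor index this only records the
// deopt PC offset for the setter stub.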
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r1, holder, accessor_index, ACCESSOR_SETTER);
      __ InvokeFunction(r1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


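// Helpers for saving, restoring and discarding the type feedback vector and
// slot registers on the stack.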
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ push(vector);
  __ push(slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ pop(slot);
  __ pop(vector);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ add(sp, sp, Operand(2 * kPointerSize));
}


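// Probes the receiver to prove that |name| is not present as an own property:
// bails out to |miss_label| if the receiver has a named interceptor, needs an
// access check, is not a JS receiver, or its properties are not in dictionary
// mode; otherwise performs a negative dictionary lookup.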
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ ldr(result,
         FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


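// Loads the function receiver's prototype into r0 and returns; jumps to
// |miss_label| when the prototype cannot be loaded directly.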
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}


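// Pushes the three arguments expected by the LoadPropertyWithInterceptor
// runtime entries: name, receiver and holder.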
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ push(name);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = r0;
  Register data = r4;
  Register holder = r2;
  Register api_function_address = r1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ ldr(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ ldr(data,
             FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ ldr(data,
             FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ ldr(data,
             FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ ldr(data,
             FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiAccessorStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


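// Pushes the arguments expected by the store runtime entries below:
// receiver, name, value, slot and vector.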
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ mov(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


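// Loads the transition map from its weak cell into |map_reg|, jumping to
// |miss| if the cell has been cleared or the map has been deprecated.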
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ ldr(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ tst(scratch, Operand(Map::Deprecated::kMask));
    __ b(ne, miss);
  }
}


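// Checks that |value_reg| holds the constant recorded for descriptor
// |descriptor| in the map's descriptor array; jumps to |miss_label| otherwise.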
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ ldr(scratch,
         FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ cmp(value_reg, scratch);
  __ b(ne, miss_label);
}

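// Checks that the value being stored matches the expected field type: misses
// on Smis and, for class types, on a map mismatch against the field's map.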
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ ldr(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    __ CmpWeakValue(map_reg, Map::WeakCellForMap(field_type->AsClass()),
                    scratch);
    __ b(ne, miss_label);
  }
}


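// Walks the prototype chain from the receiver's map to the holder, emitting
// map checks or negative dictionary lookups as needed (or relying on the
// prototype chain validity cell when FLAG_eliminate_prototype_chain_checks
// is set), and returns the register that ends up holding the holder.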
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ mov(scratch1, Operand(validity_cell));
      __ ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ cmp(scratch1, Operand(Smi::FromInt(Map::kPrototypeChainValid)));
      __ b(ne, miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ ldr(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ b(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ ldr(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ b(ne, miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ b(ne, miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


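// Emits the common miss path for the frontend: restores the vector and slot
// registers when the IC uses a feedback vector and tail-calls the miss
// builtin.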
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r0, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<AccessorInfo> callback) {
  DCHECK(!AreAliased(scratch2(), scratch3(), scratch4(), receiver()));
  DCHECK(!AreAliased(scratch2(), scratch3(), scratch4(), reg));

  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  __ push(receiver());
  // Push data from AccessorInfo.
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ Move(scratch2(), data);
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch2(), cell);
  }
  __ push(scratch2());
  __ LoadRoot(scratch2(), Heap::kUndefinedValueRootIndex);
  __ Push(scratch2(), scratch2());
  __ mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch2(), reg);
  __ Push(Smi::FromInt(0));  // should_throw_on_error -> false
  __ push(name());

  // Abi for CallApiGetter
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ mov(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and it is needed should the interceptor return without any
  // result. The ACCESSOR case needs the receiver to be passed into C++ code;
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if the interceptor provided a value for the property.  If it did,
    // return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1());
    __ b(eq, &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    __ pop(this->name());
    __ pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ pop(receiver());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ push(receiver());  // receiver
  __ push(holder_reg);

  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ mov(ip, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ mov(ip, Operand(cell));
  }
  __ push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStorePropertyWithInterceptor);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    __ b(eq, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, r1, r3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM