// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/ic-inl.h"
#include "src/objects-visiting.h"

namespace v8 {
namespace internal {


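// A string type is a shortcut candidate if its type bits match the cons
// string shortcut tag. Such cons strings may be unwrapped to their first
// part by the GC visitors when the second part is the empty string.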
static inline bool IsShortcutCandidate(int type) {
  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
}


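// Maps an instance type (and, for variable-sized objects, the instance
// size) to the visitor id that the static visitors dispatch on.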
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type,
    int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject,
                                   kVisitDataObjectGeneric,
                                   instance_size);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSSet::kSize);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSMap::kSize);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSProxy::kSize);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSFunctionProxy::kSize);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 Foreign::kSize);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject,
                                 kVisitJSObjectGeneric,
                                 instance_size);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size)                     \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:

    TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 instance_size);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle.  Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so in that case slots have to be recorded manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
      heap->mark_compact_collector()->is_compacting();
}


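// Traits describing how a weakly linked list of objects of type T is
// threaded through the heap. Each specialization provides accessors for
// the weak "next" field as well as hooks that are invoked for live and
// phantom (dropped) list elements.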
template <class T>
struct WeakListVisitor;


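// Walks the weak list starting at |list|, asking |retainer| which elements
// to keep. Dead elements are unlinked, surviving elements are rechained,
// and the updated "next" slots are recorded with the mark-compact collector
// when compaction is in progress. Returns the new list head.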
template <class T>
Object* VisitWeakList(Heap* heap,
                      Object* list,
                      WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        ASSERT(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
            HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      ASSERT(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it contains one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


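// Unlinks every element of the weak list starting at |list| by resetting
// its weak "next" field to undefined.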
template <class T>
static void ClearWeakList(Heap* heap,
                          Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


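// JSFunctions are chained through their next-function-link field; live and
// phantom elements need no extra work.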
template<>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() {
    return JSFunction::kNextFunctionLinkOffset;
  }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


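// Code objects are chained through their next-code-link field; live and
// phantom elements need no extra work.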
template<>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) {
    return code->next_code_link();
  }

  static int WeakNextOffset() {
    return Code::kNextCodeLinkOffset;
  }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


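// Contexts are chained through the NEXT_CONTEXT_LINK slot. A live context
// additionally carries three weak lists of its own (optimized functions,
// optimized code and deoptimized code), which are pruned here as well; a
// phantom context has those lists cleared.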
template<>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK,
                 next,
                 UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap,
                              Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
        Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  template<class T>
  static void DoWeakList(Heap* heap,
                         Context* context,
                         WeakObjectRetainer* retainer,
                         int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot = HeapObject::RawField(
          context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(
          head_slot, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
        context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


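// Array buffer views (typed arrays and data views) are chained through
// their weak_next field off the owning JSArrayBuffer.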
template<>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBufferView::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


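// JSArrayBuffers are chained through their weak_next field. A live buffer
// also prunes its weak list of views; a phantom buffer has its backing
// store released via Runtime::FreeArrayBuffer.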
template<>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBuffer::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap* heap,
                              JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj =
        VisitWeakList<JSArrayBufferView>(
            heap,
            array_buffer->weak_first_view(),
            retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(
          array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


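// AllocationSites are chained through their weak_next field and need no
// extra work for live or phantom elements.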
template<>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return AllocationSite::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


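// Explicit instantiations of VisitWeakList for the weak list types that the
// GC traverses.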
template Object* VisitWeakList<Code>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);

} }  // namespace v8::internal