// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {


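// Maps an instance type (and, for the size-specialized visitors, the instance
// size) to the id of the static visitor used for objects of that type.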
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect once we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


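// Traits describing how a weak list is threaded through objects of type T.
// Each specialization below provides accessors for the weak-next field
// (SetWeakNext, WeakNext, WeakNextOffset) and hooks that are invoked for
// elements that survive (VisitLiveObject) or die (VisitPhantomObject).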
template <class T>
struct WeakListVisitor;


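// Walks the weak list starting at |list|, asking |retainer| which elements to
// keep. Surviving elements are relinked into a new list (recording the updated
// weak-next slots when the collector is compacting), while dead elements are
// handed to VisitPhantomObject. Returns the new list head, terminated by
// undefined.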
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


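// Unconditionally dismantles the weak list starting at |list| by resetting
// every element's weak-next field to undefined.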
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


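// JSFunctions are chained through their next_function_link field; no extra
// work is needed for live or dead elements.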
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


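// Code objects are chained through their next_code_link field; no extra work
// is needed for live or dead elements.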
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


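// Contexts are chained through the NEXT_CONTEXT_LINK slot. A live context
// additionally carries three nested weak lists (optimized functions, optimized
// code, deoptimized code), which are pruned here; a dead context has them
// cleared instead.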
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


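// JSArrayBufferViews are chained through their weak_next field; no extra work
// is needed for live or dead elements.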
template <>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


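// JSArrayBuffers are chained through their weak_next field. A live buffer
// additionally prunes the weak list of views hanging off weak_first_view; a
// dead buffer is handed to Runtime::FreeArrayBuffer to release its backing
// store.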
template <>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }

  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
        heap, array_buffer->weak_first_view(), retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(array_buffer,
                                           JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


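// AllocationSites are chained through their weak_next field; no extra work is
// needed for live or dead elements.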
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


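// Explicit instantiations of VisitWeakList for the element types above, so
// that the definitions are emitted here and can be linked from other files.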
template Object* VisitWeakList<Code>(Heap* heap, Object* list,
                                     WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
                                           WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
                                              WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}
}  // namespace v8::internal
