// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "allocation.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires two
// switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table containing
// pointers to specialized visit functions. Each map has a visitor_id
// field which holds the index of the specialized visitor to use.
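//
// For example (an illustrative sketch of the intended use), a GC pass can
// visit an object with a single indirect call instead of type switches:
//
//   int size = table_.GetVisitor(map)(map, obj);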

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V)    \
  V(SeqOneByteString)         \
  V(SeqTwoByteString)         \
  V(ShortcutCandidate)        \
  V(ByteArray)                \
  V(FreeSpace)                \
  V(FixedArray)               \
  V(FixedDoubleArray)         \
  V(ConstantPoolArray)        \
  V(NativeContext)            \
  V(AllocationSite)           \
  V(DataObject2)              \
  V(DataObject3)              \
  V(DataObject4)              \
  V(DataObject5)              \
  V(DataObject6)              \
  V(DataObject7)              \
  V(DataObject8)              \
  V(DataObject9)              \
  V(DataObjectGeneric)        \
  V(JSObject2)                \
  V(JSObject3)                \
  V(JSObject4)                \
  V(JSObject5)                \
  V(JSObject6)                \
  V(JSObject7)                \
  V(JSObject8)                \
  V(JSObject9)                \
  V(JSObjectGeneric)          \
  V(Struct2)                  \
  V(Struct3)                  \
  V(Struct4)                  \
  V(Struct5)                  \
  V(Struct6)                  \
  V(Struct7)                  \
  V(Struct8)                  \
  V(Struct9)                  \
  V(StructGeneric)            \
  V(ConsString)               \
  V(SlicedString)             \
  V(Symbol)                   \
  V(Oddball)                  \
  V(Code)                     \
  V(Map)                      \
  V(Cell)                     \
  V(PropertyCell)             \
  V(SharedFunctionInfo)       \
  V(JSFunction)               \
  V(JSWeakMap)                \
  V(JSWeakSet)                \
  V(JSArrayBuffer)            \
  V(JSTypedArray)             \
  V(JSDataView)               \
  V(JSRegExp)

  // For data objects, JS objects and structs we provide visitors specialized
  // by object size in words, along with a generic visitor that can visit an
  // object of any size.
  // The ids of the specialized visitors are declared in linear order (without
  // holes), starting from the id of the visitor specialized for 2-word
  // objects (the base visitor id) and ending with the id of the generic
  // visitor.
  // GetVisitorIdForSize depends on this ordering to compute the id of the
  // specialized visitor from a given instance size, the base visitor id and
  // the generic visitor's id.
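  //
  // As an illustrative walk-through of this scheme: a 5-word JS object
  // selects kVisitJSObject5, i.e. kVisitJSObject2 + (5 - 2), while any
  // JS object larger than 9 words falls back to kVisitJSObjectGeneric.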
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id)  kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // based on the object size, the base visitor id and the generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    ASSERT((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    ASSERT(IsAligned(object_size, kPointerSize));
    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
    ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};


template<typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy so that we can guarantee that during the
    // update every element of the callbacks_ array remains a valid
    // pointer (memcpy might be implemented as a byte-copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    ASSERT(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
  }

  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic,
           int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }


  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT(
        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }
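
  // Illustrative use (a sketch only; MyVisitor is a hypothetical visitor
  // class, not one defined in this file):
  //
  //   table_.RegisterSpecializations<
  //       MyVisitor,
  //       StaticVisitorBase::kVisitDataObject,
  //       StaticVisitorBase::kVisitDataObjectGeneric>();
  //
  // This registers MyVisitor::VisitSpecialized<size> for objects of 2 to 9
  // words and MyVisitor::Visit as the generic fallback.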

 private:
  AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};


template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap,
                                     HeapObject* object,
                                     int start_offset,
                                     int end_offset)) {
    Object** start_slot = reinterpret_cast<Object**>(object->address() +
                                                     start_offset);
    Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                   end_offset);
    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }

  template<int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(),
        object,
        BodyDescriptor::kStartOffset,
        BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
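

// A rough usage sketch for the body visitors above (illustrative only; the
// visitor and descriptor names are hypothetical stand-ins for a static
// visitor and a fixed-layout BodyDescriptor):
//
//   typedef FixedBodyVisitor<SomeStaticVisitor,
//                            SomeType::BodyDescriptor,
//                            void> SomeTypeVisitor;
//
// SomeTypeVisitor::Visit then iterates the pointer fields between
// BodyDescriptor::kStartOffset and BodyDescriptor::kEndOffset.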


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by the dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (e.g.
// inlining and specialization of StaticVisitor::VisitPointers methods).
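//
// A minimal sketch of such a derived visitor (illustrative; the body of
// VisitPointer is a placeholder, not code from this file):
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//    public:
//     static inline void VisitPointer(Heap* heap, Object** p) {
//       // e.g. update *p if it points into from-space.
//     }
//   };
//
// VisitPointers (below) calls StaticVisitor::VisitPointer for each slot,
// which is why the derived class is expected to provide it.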
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit the code entry. We use this visitor only during scavenges.

    VisitPointers(
        heap,
        HeapObject::RawField(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object,
                             JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)->
        SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is another example of the Curiously Recurring Template Pattern.
template<typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers as weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  class DataObjectVisitor {
   public:
    template<int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {
    }

    INLINE(static void Visit(Map* map, HeapObject* object)) {
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              FixedArray::BodyDescriptor,
                              void> FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_H_