// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

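// Fills in the static dispatch table with the visitor functions used by the
// scavenger when iterating the bodies of new-space objects.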
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSWeakSet, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


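// Visits the pointer fields of a JSArrayBuffer but skips its weak_next and
// weak_first_view fields (see the STATIC_ASSERT below); those links are
// treated as weak and are processed separately by the garbage collector.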
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


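// Visits a JSTypedArray (and, below, a JSDataView) but skips the weak_next
// field that links the view into its buffer's weak list of views.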
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
          JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


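// Fills in the static dispatch table with the visitor functions used during
// full (mark-compact) marking.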
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                  Symbol::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitJSWeakSet, &StaticVisitor::VisitWeakCollection);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitCell,
                  &FixedBodyVisitor<StaticVisitor,
                  Cell::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


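// Marks the code object referenced from a function's code entry field and
// records the slot so that it can be updated if the code object moves.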
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


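// Marks an object embedded in code and records the relocation slot. The
// object is not marked if it is weakly embedded for this kind of code.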
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  if (!Code::IsWeakEmbeddedObject(rinfo->host()->kind(), object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}


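// Marks the code target of a patched return sequence or debug break slot and
// records the relocation slot.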
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
       Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(target->GetIsolate(), rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


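// Visits the strong fields of a native context and records the slots of its
// weak fields so that surviving values can be relocated during compaction.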
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array but do not push it onto
    // the marking stack; this makes the references from it weak. Dead code
    // is cleaned up when we iterate over property cells in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array but do not push it
    // onto the marking stack; this makes the references from it weak. Dead
    // code is cleaned up when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


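// Visits a code object: optionally clears its type feedback cells, ages the
// code if code aging is enabled, and then iterates over its body.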
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs when code flushing is
      // disabled; this is needed because cached code doesn't contain
      // breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


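// Visits only the pointer entries of a constant pool array; the integer
// entries contain no heap references.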
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
  int first_ptr_offset = constant_pool->OffsetOfElementAt(
      constant_pool->first_ptr_index());
  int last_ptr_offset = constant_pool->OffsetOfElementAt(
      constant_pool->first_ptr_index() + constant_pool->count_of_ptr_entries());
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, first_ptr_offset),
      HeapObject::RawField(object, last_ptr_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


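// Generic JSRegExp visitor: treats all of the regexp's fields, including
// in-object properties, as strong references. The registration for
// kVisitJSRegExp itself is left to StaticVisitor (see Initialize above).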
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


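// Like the new-space versions above, the following three visitors skip the
// weak_next (and, for array buffers, weak_first_view) fields, which are
// treated as weak links and processed separately by the garbage collector.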
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstViewOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
          JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object,
        JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array but do not push it onto the marking
  // stack; this makes the references from it weak. Dead code is cleaned up
  // when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array but do not push it onto the
    // marking stack; this makes the references from it weak. Dead prototype
    // transitions are cleaned up in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


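// A context qualifies for code flushing purposes only if it is a real
// Context whose global object is not the builtins object.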
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // If code aging is enabled, only flush code that has become old.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and its source code must be available,
  // so that it can be recompiled if it is needed again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


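// Visits all pointer fields of a SharedFunctionInfo, keeping its code and
// optimized code map strongly reachable.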
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


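// Visits a JSFunction with its code entry treated as a strong reference; the
// weak-code variant below skips the code entry instead.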
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_