mark_sweep.h revision ec05007f8619f8b0cc868d06731e07f84bb74c5b
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

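  // Marks roots that do not belong to individual mutator threads.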
  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

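  // Marks roots that can safely be marked concurrently while the mutators are running.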
  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

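  // Marks thread roots using a checkpoint, so that each thread marks its own roots.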
  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune; immune spaces have all live objects marked - that is, the mark and
  // live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

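  // Returns true if the given space has been made immune.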
  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

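  // Binds the live bitmap of the given space to its mark bitmap.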
  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

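  // Undoes any live-to-mark bitmap bindings set up by BindBitmaps / BindLiveToMarkBitmap.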
  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

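  // Processes the soft, weak, finalizer and phantom reference lists after marking completes.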
  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

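  // Removes entries whose referents are no longer marked from the runtime's system weak tables.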
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

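  // Verifies that the remaining system weak references point to live objects.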
  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

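  // Visits all reference fields of obj with the given visitor; if visit_class is true, the
  // object's class is also visited as a reference.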
  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

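  // Static callbacks used when visiting roots; arg is expected to be the MarkSweep instance.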
  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

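  // Returns the barrier used to synchronize with the root marking checkpoint.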
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit inside the corresponding bitmap or, if it is in a
  // space set, by removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object based on whether set is true: if set is true we mark it,
  // otherwise we unmark it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

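  // Recursively blackens objects on the mark stack using thread_count worker threads.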
  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many objects of each type were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_mark_bitmap_;

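  // Stack of objects that have been marked but not yet had their references scanned.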
  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

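  // Reference lists built up while reference processing is delayed, one per reference type.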
  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

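  // gc_barrier_ synchronizes threads during the root marking checkpoint; large_object_lock_
  // guards large object marking; mark_stack_lock_ guards mark stack resizing and parallel access.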
  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_