mark_sweep.h revision ef7d42fca18c16fbaf103822ad16f23246e2905d
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

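  // Marks roots in the runtime that are not held by individual threads.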
  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

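  // Marks roots that can safely be marked concurrently while the mutators are running.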
  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

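  // Marks the roots of each thread by running a checkpoint on all threads.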
  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all live objects marked - that is, the mark and
  // live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
        SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
        EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap, or, if it is in a
  // space set, by removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object based on whether or not set is true: if set is true we
  // mark the object, otherwise we unmark it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor *visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

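  // Blackens objects on the mark stack in parallel using thread_count worker threads.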
  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for
  // an object.
  accounting::SpaceBitmap* current_mark_bitmap_;

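  // Stack of objects that have been marked but not yet scanned.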
  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
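  // Counters below are used for profiling and debugging the collector.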
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

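  // Barrier used to synchronize threads during MarkRootsCheckpoint.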
  UniquePtr<Barrier> gc_barrier_;
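  // Locks used during parallel marking: large_object_lock_ guards large object marking,
  // mark_stack_lock_ guards mark stack operations.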
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

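  // Whether or not this collector runs concurrently with the mutators (see IsConcurrent()).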
  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_