mark_sweep.h revision dda54f59271464b5a72bf4cde6d9010e8dc1f337
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include <string>

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }
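
  // Note: the phase methods above are not meant to be called directly. The GarbageCollector base
  // class drives a collection by calling them in sequence; a sketch of the expected ordering
  // (assuming the usual GarbageCollector::Run() flow):
  //
  //   InitializePhase();
  //   MarkingPhase();             // Concurrent collectors run this while mutators are running.
  //   HandleDirtyObjectsPhase();  // Concurrent collectors only; runs in a pause, may be retried.
  //   ReclaimPhase();
  //   FinishPhase();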

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all of their live objects marked - that is, the
  // mark and live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e. the
  // image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;
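
  // A MarkVisitor is any callable that ScanObjectVisit applies to each reference field it finds
  // in |obj|. The sketch below is a hypothetical visitor for illustration only; the exact
  // parameter list the caller must accept is dictated by VisitObjectReferences, so check that
  // before relying on it:
  //
  //   struct CountReferencesVisitor {
  //     CountReferencesVisitor() : count(0) {}
  //     void operator()(mirror::Object* obj, mirror::Object* ref, const MemberOffset& offset,
  //                     bool is_static) const {
  //       if (ref != NULL) {
  //         ++count;  // Tally the outgoing references of obj.
  //       }
  //     }
  //     mutable size_t count;
  //   };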

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                               RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackPausedCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                       RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }
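
  // The barrier is the rendezvous point for checkpoint-based root marking. A sketch of the
  // intended use (assuming the usual thread-checkpoint protocol): MarkRootsCheckpoint requests a
  // checkpoint on every running thread, each thread marks the roots on its own stack and then
  // signals gc_barrier_, and the GC thread blocks on GetBarrier() until all threads have passed.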

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap, or, if it is in a space
  // set, by removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object based on whether or not set is true: if set is true we mark
  // the object, otherwise we unmark it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verifies the roots of the heap and prints out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }
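
  // Because IsImmune() is a single pointer-range test, everything made immune (via SetImmuneRange
  // or ImmuneSpace) must fall within one contiguous address range; presumably ImmuneSpace extends
  // immune_begin_/immune_end_ rather than tracking disjoint spaces.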

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_