mark_sweep.h revision 601276abdb746b03675ff945745aa936694d3439
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual bool HandleDirtyObjectsPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  virtual bool IsConcurrent() const OVERRIDE;

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

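  // The phases above are driven by the GarbageCollector base class. A
  // simplified, illustrative sketch of the expected sequence (the real driver
  // lives in GarbageCollector; the retry handling of HandleDirtyObjectsPhase()
  // shown here is an assumption):
  //
  //   InitializePhase();
  //   MarkingPhase();                      // concurrent with mutators
  //   while (!HandleDirtyObjectsPhase()) {
  //   }                                    // paused re-mark of dirty objects
  //   ReclaimPhase();                      // sweeps unmarked objects
  //   FinishPhase();
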
  // Initializes internal structures.
  void Init();

  // Finds the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots(VisitRootFlags flags)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks objects until the stack empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never
  // collected, i.e. the image. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks
  // until the stack empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

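  // A minimal sketch of what dirty-card scanning amounts to, assuming a card
  // table Scan() helper roughly along these lines (the exact API is an
  // assumption):
  //
  //   for each continuous space:
  //     card_table->Scan(space->GetMarkBitmap(), space->Begin(), space->End(),
  //                      scan_visitor, minimum_age);
  //
  // Objects on cards at least as old as minimum_age are re-scanned so that
  // references written by mutators during concurrent marking are not missed.
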
  // Re-marks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreProcessReferences()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Updates and marks references from immune spaces. Virtual because it is
  // overridden by StickyMarkSweep.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-cleans cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual
  // because by default it sweeps all allocation spaces; partial and sticky
  // GCs want to sweep only a subset of the heap.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only the pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

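  // Illustratively, SweepArray frees every object in the given allocation
  // stack that is not marked (sketch only; FreeObject() is a hypothetical
  // helper):
  //
  //   for (mirror::Object* obj : *allocation_stack_) {
  //     if (!IsMarked(obj)) {
  //       FreeObject(obj);  // hence the warning: swept objects are trashed
  //     }
  //   }
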
  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

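  // A MarkVisitor is any functor invoked once per reference field of obj. A
  // plausible shape, matching how the visitation templates below are
  // parameterized (the exact signature is an assumption):
  //
  //   struct ExampleVisitor {
  //     void operator()(mirror::Object* obj, mirror::Object* ref,
  //                     const MemberOffset& offset, bool is_static) const {
  //       // e.g. push ref onto a mark stack if it was not yet marked
  //     }
  //   };
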
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verifies that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                               RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyRootMarked(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                               RootType /*root_type*/)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackPausedCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                       RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

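  // The barrier coordinates checkpoint root marking: each runnable thread
  // marks its own roots at a checkpoint and then signals the barrier while the
  // GC thread waits for all of them. A hedged sketch (Increment() is assumed
  // to be the relevant Barrier API):
  //
  //   mark_sweep->GetBarrier().Increment(self, threads_running_checkpoint);
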
 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing its bit in the corresponding bitmap, or, if
  // it is in a space set, by removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically; safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object: if set is true we mark, otherwise we
  // unmark.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

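  // Parallel marking is typically an atomic test-and-set on the mark bitmap,
  // so only the thread that flips the bit queues the object for scanning. A
  // sketch under that assumption (AtomicTestAndSet() is assumed):
  //
  //   if (!current_mark_bitmap_->AtomicTestAndSet(obj)) {
  //     return true;  // newly marked: caller must push obj to a mark stack
  //   }
  //   return false;   // already marked by another thread
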
  // Verifies the roots of the heap and prints out information related to any
  // invalid roots. Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expands the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on
  // whether we are paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header, static field references, and interface pointers of a
  // class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

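  // For instance and static fields, ref_offsets is a bitmap with one bit per
  // field offset that holds a reference, so visitation reduces to iterating
  // its set bits. A hedged sketch (the bit-to-offset mapping is an
  // assumption):
  //
  //   while (ref_offsets != 0) {
  //     const uint32_t bit = 31 - CLZ(ref_offsets);  // highest set bit
  //     visitor(obj, /* field at the offset encoded by bit */, is_static);
  //     ref_offsets &= ~(1U << bit);
  //   }
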
  // Visits all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

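  // Draining the mark stack is the classic tri-color loop: pop a gray object,
  // blacken it by scanning, and push any newly marked references. A sketch
  // (PopBack() is assumed to be the ObjectStack API):
  //
  //   while (!mark_stack_->IsEmpty()) {
  //     mirror::Object* obj = mark_stack_->PopBack();
  //     ScanObject(obj);  // may push more gray objects onto mark_stack_
  //   }
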
  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from
  // MarkingPhase and is guarded by IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the
  // appropriate space for an object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside it is assumed to be marked.
  ImmuneRegion immune_region_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/non-array objects scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_