mark_sweep.h revision 407f702da4f867c074fc3c8c688b8f8c32279eff
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
  class Reference;
}  // namespace mirror

class StackVisitor;
class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void HandleDirtyObjectsPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
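
  // Illustrative sketch, not part of this header's contract: the lock annotations above suggest a
  // concurrent collection drives these phases roughly in the order below, pausing the mutators
  // only for the dirty-object handling. The authoritative sequencing lives in the collector's
  // Run() logic, and the variable name here is hypothetical.
  //
  //   MarkSweep* mark_sweep = ...;            // hypothetical instance
  //   mark_sweep->InitializePhase();
  //   mark_sweep->MarkingPhase();             // mutator lock held shared (concurrent)
  //   mark_sweep->HandleDirtyObjectsPhase();  // mutator lock held exclusively (paused)
  //   mark_sweep->ReclaimPhase();
  //   mark_sweep->FinishPhase();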

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  virtual CollectorType GetCollectorType() const OVERRIDE {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap();

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots(VisitRootFlags flags)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e. the
  // image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreProcessReferences()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces. Virtual because it is overridden by
  // StickyMarkSweep.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual because by default it
  // sweeps all allocation spaces; partial and sticky GCs only want to sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
    EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
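
  // Usage sketch with hypothetical visitor signatures, shown only to illustrate the two template
  // parameters; the exact callable shapes are whatever the implementation in mark_sweep.cc and
  // its callers pass:
  //
  //   auto mark_visitor = [this](mirror::Object* ref) { MarkObject(ref); };
  //   auto ref_visitor = [this](mirror::Class* klass, mirror::Reference* ref) {
  //     DelayReferenceReferent(klass, ref);
  //   };
  //   ScanObjectVisit(obj, mark_visitor, ref_visitor);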

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                               RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyRootMarked(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                               RootType /*root_type*/)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackPausedCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                       RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
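
  // Callback convention (sketch, assuming the usual ART pattern where the void* arg is the
  // MarkSweep instance itself; the exact bodies live in mark_sweep.cc):
  //
  //   mirror::Object* MarkSweep::MarkObjectCallback(mirror::Object* obj, void* arg) {
  //     reinterpret_cast<MarkSweep*>(arg)->MarkObject(obj);
  //     return obj;
  //   }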

  // Marks an object.
  void MarkObject(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap, or, if it is in a space
  // set, by removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Mark the VM thread roots.
  void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj);

  // Marks a large object if set is true, otherwise unmarks it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) LOCKS_EXCLUDED(large_object_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Push a single reference on a mark stack.
  void PushOnMarkStack(mirror::Object* obj);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
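
  // Rough shape of the non-parallel drain loop (sketch only, assuming the usual pop/scan
  // structure; ScanObject() may push newly marked references back onto mark_stack_):
  //
  //   while (!mark_stack_->IsEmpty()) {
  //     mirror::Object* obj = mark_stack_->PopBack();
  //     ScanObject(obj);
  //   }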

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
  // IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking.
  accounting::HeapBitmap* mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside it is assumed to be marked.
  ImmuneRegion immune_region_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/array objects scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class MarkObjectVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_