mark_sweep.h revision fc0e3219edc9a5bf81b166e82fd5db2796eb6a0d
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class CheckObjectVisitor;
class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase();
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

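  // Rough sketch of how a collection is expected to drive these phases (the exact sequencing is
  // owned by the GarbageCollector driver; see garbage_collector.cc and mark_sweep.cc):
  //   InitializePhase();          // set up internal state for this collection
  //   MarkingPhase();             // mark roots and recursively mark reachable objects
  //   HandleDirtyObjectsPhase();  // concurrent collections only: pause and re-mark dirty objects
  //   ReclaimPhase();             // process references and sweep unmarked objects
  //   FinishPhase();              // update statistics and reset state
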
  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verify that image roots point to only marked objects within the alloc space.
  void VerifyImageRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all of their live objects marked, that is, their
  // mark and live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image space. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Proxy for external access to ScanObject.
  void ScanRoot(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

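  // The finger roughly tracks how far the address-ordered bitmap scan in RecursiveMark has
  // progressed; objects marked behind it are pushed onto the mark stack so that they still get
  // scanned (see the check_finger parameter of MarkObjectNonNull). DisableFinger() sets the
  // finger to the highest possible address so that every newly marked object is pushed.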
  void SetFinger(mirror::Object* new_finger) {
    finger_ = new_finger;
  }

  void DisableFinger() {
    SetFinger(reinterpret_cast<mirror::Object*>(~static_cast<uintptr_t>(0)));
  }

  size_t GetFreedBytes() const {
    return freed_bytes_;
  }

  size_t GetFreedObjects() const {
    return freed_objects_;
  }

  uint64_t GetTotalTimeNs() const {
    return total_time_ns_;
  }

  uint64_t GetTotalPausedTimeNs() const {
    return total_paused_time_ns_;
  }

  uint64_t GetTotalFreedObjects() const {
    return total_freed_objects_;
  }

  uint64_t GetTotalFreedBytes() const {
    return total_freed_bytes_;
  }

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Only sweep the weaks which are inside of an allocation stack.
  void SweepSystemWeaksArray(accounting::ObjectStack* allocations)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool VerifyIsLiveCallback(const mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(const mirror::Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static void MarkObjectCallback(const mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootParallelCallback(const mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static bool IsMarkedCallback(const mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool IsMarkedArrayCallback(const mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ReMarkObjectVisitor(const mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj, bool check_finger)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkObjectNonNullParallel(const mirror::Object* obj, bool check_finger);

  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const mirror::Object* obj, const mirror::Object* ref, MemberOffset offset,
                      bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size. Thread safe.
  void ExpandMarkStack();

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor *visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const mirror::Class* klass, const mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SweepJniWeakGlobals(IsMarkedTester is_marked, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Mark bitmap of the current space. We check this bitmap first to avoid searching for the
  // appropriate bitmap for each object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  // Cache java.lang.Class for optimization.
  mirror::Class* java_lang_Class_;

  // Stack of objects still waiting to be scanned.
  accounting::ObjectStack* mark_stack_;

  // Marking finger; see SetFinger()/DisableFinger().
  mirror::Object* finger_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

  // Number of bytes freed in this collection.
  AtomicInteger freed_bytes_;
  // Number of objects freed in this collection.
  AtomicInteger freed_objects_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
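  // The remaining counters are profiling/debugging statistics: large object mark attempts and
  // successes, classes marked, parallel marking overhead and work chunk churn, and references
  // encountered. Their exact use is defined in mark_sweep.cc.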
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Barrier used to synchronize with the mutator threads during the MarkRootsCheckpoint pause.
  UniquePtr<Barrier> gc_barrier_;
  // Guards marking of large objects from multiple threads.
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  // Guards ExpandMarkStack.
  Mutex mark_stack_expand_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;

  // Whether this collection runs concurrently with the mutator threads.
  const bool is_concurrent_;

  // Whether soft references should be cleared rather than preserved during this collection.
  bool clear_soft_references_;

  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CheckBitmapVisitor;
  friend class CheckObjectVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  friend class MarkStackChunk;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_