semi_space.h revision 39e3261168e7761fea6d873494d7c5d191285791
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class SemiSpace : public GarbageCollector {
 public:
  explicit SemiSpace(Heap* heap, const std::string& name_prefix = "");

  ~SemiSpace() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const {
    return false;
  }
  virtual void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypePartial;
  }

  // Sets which space we will be copying objects to.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets the space where we copy objects from.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Returns the new address of the object.
  mirror::Object* MarkObject(mirror::Object* object)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

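  // Scans an object, visiting and marking each object that it references.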
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all of their live objects marked, that is, the
  // mark and live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image space. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

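  // Binds the live bitmap of the given space to its mark bitmap so that all of its live objects
  // are treated as marked.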
  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

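  // Reverts the live/mark bitmap bindings established in BindBitmaps.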
  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

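  // Processes the soft, weak, finalizer and phantom references discovered during the collection.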
  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

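  // Sweeps system weaks, clearing entries whose referents are no longer marked.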
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitObjectReferencesAndClass(mirror::Object* obj, const Visitor& visitor)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

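  // Root marking callback: marks the root object and returns its new address.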
  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

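  // Callback form of MarkObject used for recursive marking; returns the object's new address.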
  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (the same
  // as the object itself for non-movable objects).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const;

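  // Callback form of GetMarkedForwardAddress.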
  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object in the large object space.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

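  // Sweep callback used to free a batch of unmarked objects.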
  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Resizes the mark stack to the given new size.
  void ResizeMarkStack(size_t new_size);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

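  // Returns true if the given space lies entirely within the immune region.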
  bool IsImmuneSpace(const space::ContinuousSpace* space) const;

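  // Verifies that a root is valid (debugging aid).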
  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

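  // Visit the instance field references of an object.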
  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const mirror::Class* klass, const mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

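  // Visit the static field references of a class.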
  template <typename Visitor>
  static void VisitStaticFieldsReferences(const mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

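  // Visit the reference fields of an object described by the given reference offsets bitmap.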
  template <typename Visitor>
  static void VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Push an object onto the mark stack.
  inline void MarkStackPush(mirror::Object* obj);

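  // Updates the mod-union tables of the immune spaces and marks the objects that they reference.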
  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

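  // Enqueues finalizer references with unmarked referents, marking the referents so that they
  // can be finalized.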
  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

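  // Marks the referents of some soft references so that they survive the collection.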
  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

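  // Clears references whose referents are unmarked (white).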
  void ClearWhiteReferences(mirror::Object** list)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

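  // Returns the forwarding address recorded in an object that lives in the from-space.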
  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const;

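  // Returns the forwarding address of obj if it lies in the from-space, otherwise obj itself.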
  mirror::Object* GetForwardingAddress(mirror::Object* obj);

  // Mark stack of objects that have been marked but whose references have not yet been scanned.
  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  // Destination and source spaces.
  space::ContinuousMemMapAllocSpace* to_space_;
  space::ContinuousMemMapAllocSpace* from_space_;

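  // Heads of the reference lists built up during reference processing, one per reference type.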
  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

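  // The thread running the garbage collection.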
  Thread* self_;

 private:
  DISALLOW_COPY_AND_ASSIGN(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_