semi_space.h revision 507dfdd147c97bfbadebfd63584d094b6a4e7b47
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtrCompat.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object*> ObjectStack;
}  // namespace accounting

namespace space {
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

class SemiSpace : public GarbageCollector {
 public:
  // If true, use remembered sets in the generational mode.
  static constexpr bool kUseRememberedSet = true;

  explicit SemiSpace(Heap* heap, bool generational = false, const std::string& name_prefix = "");

  ~SemiSpace() {}

  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  virtual void InitializePhase();
  virtual void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return generational_ ? kCollectorTypeGSS : kCollectorTypeSS;
  }

  // Sets which space we will be copying objects to.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets which space we will be copying objects from.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);

  // Sets whether or not we swap the semi spaces in the heap. This needs to be done with mutators
  // suspended.
  void SetSwapSemiSpaces(bool swap_semi_spaces) {
    swap_semi_spaces_ = swap_semi_spaces;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();
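
  // A note on "marking" in a copying collector: marking an object means making sure it has a
  // forwarding address. An unforwarded from-space object is copied into the to-space (or
  // promoted, in the generational mode) and a forwarding address is installed in the old copy;
  // the visited reference is then rewritten to point at the new location. In outline (a
  // simplified sketch; the real implementation also handles the immune region and the
  // non-moving spaces):
  //
  //   mirror::Object* obj = obj_ptr->AsMirrorPtr();
  //   if (from_space_->HasAddress(obj)) {
  //     mirror::Object* forward_address = GetForwardingAddressInFromSpace(obj);
  //     if (forward_address == nullptr) {
  //       forward_address = MarkNonForwardedObject(obj);  // Copy obj into the to-space.
  //     }
  //     obj_ptr->Assign(forward_address);  // Point the reference at the new copy.
  //   }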
  // Returns the new address of the object.
  template<bool kPoisonReferences>
  void MarkObject(mirror::ObjectReference<kPoisonReferences, mirror::Object>* obj_ptr)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifyNoFromSpaceReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e. the
  // image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static void DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                             void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // object for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
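
  // The static *Callback functions adapt this collector to C-style visitor interfaces: callers
  // thread the SemiSpace instance through the opaque void* arg, and each callback casts it back
  // before delegating to the corresponding member function, roughly:
  //
  //   void SemiSpace::ProcessMarkStackCallback(void* arg) {
  //     reinterpret_cast<SemiSpace*>(arg)->ProcessMarkStack();
  //   }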
  // Marks a large object, returning true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Resizes the mark stack to new_size; used to expand it to 2x its current size when it fills
  // up.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Stack of marked objects whose fields have not yet been scanned (gray objects).
  accounting::ObjectStack* mark_stack_;

  // Immune region, every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;

  // If true, the large object space is immune.
  bool is_large_object_space_immune_;

  // Destination and source spaces (can be any type of ContinuousMemMapAllocSpace which either has
  // a live bitmap or doesn't).
  space::ContinuousMemMapAllocSpace* to_space_;
  // Cached live bitmap as an optimization.
  accounting::ContinuousSpaceBitmap* to_space_live_bitmap_;
  space::ContinuousMemMapAllocSpace* from_space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  Thread* self_;

  // When true, the generational mode (promotion and the bump pointer space only collection) is
  // enabled. TODO: move these to a new file as a new garbage collector?
  const bool generational_;

  // Used for the generational mode. The end/top of the bump pointer space at the end of the
  // last collection.
  byte* last_gc_to_space_end_;

  // Used for the generational mode. During a collection, keeps track of how many bytes of
  // objects have been copied so far from the bump pointer space to the non-moving space.
  uint64_t bytes_promoted_;

  // Used for the generational mode. Keeps track of how many bytes of objects have been copied
  // so far from the bump pointer space to the non-moving space, since the last whole heap
  // collection.
  uint64_t bytes_promoted_since_last_whole_heap_collection_;

  // Used for the generational mode. Keeps track of how many bytes of large objects were
  // allocated at the last whole heap collection.
  uint64_t large_object_bytes_allocated_at_last_whole_heap_collection_;

  // Used for the generational mode. When true, collect the whole heap. When false, collect only
  // the bump pointer spaces.
  bool whole_heap_collection_;

  // How many objects and bytes we moved. Used so that we don't need to get the size of the
  // to_space_ when calculating how many objects and bytes we freed.
  size_t bytes_moved_;
  size_t objects_moved_;

  // How many bytes we avoided dirtying.
  size_t saved_bytes_;

  // The name of the collector.
  std::string collector_name_;
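
  // Taken together, the generational bookkeeping above drives the whole heap collection decision:
  // bump pointer space only collections are the common case, and whole_heap_collection_ is set
  // (forcing the next collection to cover the whole heap) roughly when enough bytes have been
  // promoted, or enough large object bytes allocated, since the last whole heap collection, or
  // after a fixed number of collections (see kDefaultWholeHeapCollectionInterval below).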
  // Used for the generational mode. The default interval of the whole heap collection. If N,
  // the whole heap collection occurs every N collections.
  static constexpr int kDefaultWholeHeapCollectionInterval = 5;

  // Whether or not we swap the semi spaces in the heap during the marking phase.
  bool swap_semi_spaces_;

 private:
  friend class BitmapSetSlowPathVisitor;
  DISALLOW_COPY_AND_ASSIGN(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_