space_bitmap.cc revision c381c36aacf977f7e314e6a91e47b31b04639f62
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "space_bitmap-inl.h"

#include "art_field-inl.h"
#include "base/stringprintf.h"
#include "dex_file-inl.h"
#include "mem_map.h"
#include "mirror/object-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array.h"

namespace art {
namespace gc {
namespace accounting {

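// Each bit in the bitmap covers kAlignment bytes of heap, so one intptr_t word of the bitmap
// accounts for kAlignment * kBitsPerIntPtrT heap bytes.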
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
  const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
  return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) * sizeof(intptr_t);
}

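// Inverse of ComputeBitmapSize(): the number of heap bytes covered by a bitmap of the given size.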
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {
  return bitmap_bytes * kBitsPerByte * kAlignment;
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
    const std::string& name, MemMap* mem_map, uint8_t* heap_begin, size_t heap_capacity) {
  CHECK(mem_map != nullptr);
  uintptr_t* bitmap_begin = reinterpret_cast<uintptr_t*>(mem_map->Begin());
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_begin,
                                     size_t bitmap_size, const void* heap_begin)
    : mem_map_(mem_map),
      bitmap_begin_(reinterpret_cast<Atomic<uintptr_t>*>(bitmap_begin)),
      bitmap_size_(bitmap_size),
      heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
      name_(name) {
  CHECK(bitmap_begin_ != nullptr);
  CHECK_NE(bitmap_size, 0U);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::~SpaceBitmap() {}

template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
    const std::string& name, uint8_t* heap_begin, size_t heap_capacity) {
  // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  std::string error_msg;
  std::unique_ptr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), nullptr, bitmap_size,
                                                       PROT_READ | PROT_WRITE, false, false,
                                                       &error_msg));
  if (UNLIKELY(mem_map.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
    return nullptr;
  }
  return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity);
}

template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
  DCHECK_ALIGNED(new_end, kBitsPerIntPtrT * kAlignment);
  size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);
  if (new_size < bitmap_size_) {
    bitmap_size_ = new_size;
  }
  // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
  // should be marked.
}

template<size_t kAlignment>
std::string SpaceBitmap<kAlignment>::Dump() const {
  return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
                      reinterpret_cast<void*>(HeapLimit()));
}

template<size_t kAlignment>
void SpaceBitmap<kAlignment>::Clear() {
  if (bitmap_begin_ != nullptr) {
    mem_map_->MadviseDontNeedAndZero();
  }
}

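// Copy the source bitmap into this one, one word at a time, using relaxed atomic loads and stores.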
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
  DCHECK_EQ(Size(), source_bitmap->Size());
  const size_t count = source_bitmap->Size() / sizeof(intptr_t);
  Atomic<uintptr_t>* const src = source_bitmap->Begin();
  Atomic<uintptr_t>* const dest = Begin();
  for (size_t i = 0; i < count; ++i) {
    dest[i].StoreRelaxed(src[i].LoadRelaxed());
  }
}

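// Visit every marked object: scan the bitmap one word at a time and, for each non-zero word, use
// CTZ to peel off the set bits; a bit at position `shift` in word `i` maps to the object at
// IndexToOffset(i) + heap_begin_ + shift * kAlignment.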
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != nullptr);
  CHECK(callback != nullptr);

  uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
  Atomic<uintptr_t>* bitmap_begin = bitmap_begin_;
  for (uintptr_t i = 0; i <= end; ++i) {
    uintptr_t w = bitmap_begin[i].LoadRelaxed();
    if (w != 0) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      do {
        const size_t shift = CTZ(w);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w ^= (static_cast<uintptr_t>(1)) << shift;
      } while (w != 0);
    }
  }
}

template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != nullptr);
  CHECK(mark_bitmap.bitmap_begin_ != nullptr);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != nullptr);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  if (sweep_end <= sweep_begin) {
    return;
  }

  // TODO: rewrite the callbacks to accept a std::vector<mirror::Object*> rather than a mirror::Object**?
  constexpr size_t buffer_size = sizeof(intptr_t) * kBitsPerIntPtrT;
#ifdef __LP64__
  // Heap-allocate for smaller stack frame.
  std::unique_ptr<mirror::Object*[]> pointer_buf_ptr(new mirror::Object*[buffer_size]);
  mirror::Object** pointer_buf = pointer_buf_ptr.get();
#else
  // Stack-allocate buffer as it's small enough.
  mirror::Object* pointer_buf[buffer_size];
#endif
  mirror::Object** pb = &pointer_buf[0];

  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / sizeof(intptr_t));
  Atomic<uintptr_t>* live = live_bitmap.bitmap_begin_;
  Atomic<uintptr_t>* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
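    // Bits set in the live bitmap but clear in the mark bitmap denote unreachable (garbage) objects.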
    uintptr_t garbage = live[i].LoadRelaxed() & ~mark[i].LoadRelaxed();
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      do {
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uintptr_t>(1)) << shift;
        *pb++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[buffer_size - kBitsPerIntPtrT]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

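// Recursively visit the reference (non-primitive) instance fields declared by klass and its
// superclasses for the given object.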
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::WalkInstanceFields(SpaceBitmap<kAlignment>* visited,
                                                 ObjectCallback* callback, mirror::Object* obj,
                                                 mirror::Class* klass, void* arg)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Visit fields of parent classes first.
  mirror::Class* super = klass->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(visited, callback, obj, super, arg);
  }
  // Walk instance fields
  for (ArtField& field : klass->GetIFields()) {
    if (!field.IsPrimitiveType()) {
      mirror::Object* value = field.GetObj(obj);
      if (value != nullptr) {
        WalkFieldsInOrder(visited, callback, value, arg);
      }
    }
  }
}

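// Depth-first visit of obj and everything reachable from it, using the `visited` bitmap to avoid
// processing the same object twice.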
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::WalkFieldsInOrder(SpaceBitmap<kAlignment>* visited,
                                                ObjectCallback* callback, mirror::Object* obj,
                                                void* arg) {
  if (visited->Test(obj)) {
    return;
  }
  // visit the object itself
  (*callback)(obj, arg);
  visited->Set(obj);
  // Walk instance fields of all objects
  mirror::Class* klass = obj->GetClass();
  WalkInstanceFields(visited, callback, obj, klass, arg);
  // Walk static fields of a Class
  if (obj->IsClass()) {
    for (ArtField& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        mirror::Object* value = field.GetObj(nullptr);
        if (value != nullptr) {
          WalkFieldsInOrder(visited, callback, value, arg);
        }
      }
    }
  } else if (obj->IsObjectArray()) {
    // Walk elements of an object array
    mirror::ObjectArray<mirror::Object>* obj_array = obj->AsObjectArray<mirror::Object>();
    int32_t length = obj_array->GetLength();
    for (int32_t i = 0; i < length; i++) {
      mirror::Object* value = obj_array->Get(i);
      if (value != nullptr) {
        WalkFieldsInOrder(visited, callback, value, arg);
      }
    }
  }
}

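// Walk marked objects in address order, visiting each one's reachable object graph depth-first;
// a temporary bitmap tracks which objects have already been visited.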
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::InOrderWalk(ObjectCallback* callback, void* arg) {
  std::unique_ptr<SpaceBitmap<kAlignment>> visited(
      Create("bitmap for in-order walk", reinterpret_cast<uint8_t*>(heap_begin_),
             IndexToOffset(bitmap_size_ / sizeof(intptr_t))));
  CHECK(bitmap_begin_ != nullptr);
  CHECK(callback != nullptr);
  uintptr_t end = Size() / sizeof(intptr_t);
  for (uintptr_t i = 0; i < end; ++i) {
    // Need uint for unsigned shift.
    uintptr_t w = bitmap_begin_[i].LoadRelaxed();
    if (UNLIKELY(w != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      while (w != 0) {
        const size_t shift = CTZ(w);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        WalkFieldsInOrder(visited.get(), callback, obj, arg);
        w ^= (static_cast<uintptr_t>(1)) << shift;
      }
    }
  }
}

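// Explicit instantiations for object-aligned and page-aligned bitmaps.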
template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;

}  // namespace accounting
}  // namespace gc
}  // namespace art