// large_object_space.h — AOSP revision cf58d4adf461eb9b8e84baa8019054c88cd8acc6
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
#define ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_

#include "gc/accounting/gc_allocator.h"
#include "dlmalloc_space.h"
#include "safe_map.h"
#include "space.h"

#include <set>
#include <vector>

namespace art {
namespace gc {
namespace space {

32// Abstraction implemented by all large object spaces.
33class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
34 public:
35  virtual SpaceType GetType() const {
36    return kSpaceTypeLargeObjectSpace;
37  }
38
39  virtual void SwapBitmaps();
40  virtual void CopyLiveToMarked();
41  virtual void Walk(DlMallocSpace::WalkCallback, void* arg) = 0;
42  virtual ~LargeObjectSpace() {}
43
44  uint64_t GetBytesAllocated() {
45    return num_bytes_allocated_;
46  }
47
48  uint64_t GetObjectsAllocated() {
49    return num_objects_allocated_;
50  }
51
52  uint64_t GetTotalBytesAllocated() {
53    return total_bytes_allocated_;
54  }
55
56  uint64_t GetTotalObjectsAllocated() {
57    return total_objects_allocated_;
58  }
59
60  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs);
61
62  virtual bool IsAllocSpace() const {
63    return true;
64  }
65
66  virtual AllocSpace* AsAllocSpace() {
67    return this;
68  }
69
70 protected:
71  explicit LargeObjectSpace(const std::string& name);
72
73  // Approximate number of bytes which have been allocated into the space.
74  size_t num_bytes_allocated_;
75  size_t num_objects_allocated_;
76  size_t total_bytes_allocated_;
77  size_t total_objects_allocated_;
78
79  friend class Space;
80
81 private:
82  DISALLOW_COPY_AND_ASSIGN(LargeObjectSpace);
83};
84
85// A discontinuous large object space implemented by individual mmap/munmap calls.
86class LargeObjectMapSpace : public LargeObjectSpace {
87 public:
88  // Creates a large object space. Allocations into the large object space use memory maps instead
89  // of malloc.
90  static LargeObjectMapSpace* Create(const std::string& name);
91
92  // Return the storage space required by obj.
93  size_t AllocationSize(const mirror::Object* obj);
94  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
95  size_t Free(Thread* self, mirror::Object* ptr);
96  void Walk(DlMallocSpace::WalkCallback, void* arg) LOCKS_EXCLUDED(lock_);
97  // TODO: disabling thread safety analysis as this may be called when we already hold lock_.
98  bool Contains(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS;
99
100 private:
101  explicit LargeObjectMapSpace(const std::string& name);
102  virtual ~LargeObjectMapSpace() {}
103
104  // Used to ensure mutual exclusion when the allocation spaces data structures are being modified.
105  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
106  std::vector<mirror::Object*,
107      accounting::GcAllocator<mirror::Object*> > large_objects_ GUARDED_BY(lock_);
108  typedef SafeMap<mirror::Object*, MemMap*, std::less<mirror::Object*>,
109      accounting::GcAllocator<std::pair<const mirror::Object*, MemMap*> > > MemMaps;
110  MemMaps mem_maps_ GUARDED_BY(lock_);
111};
112
113// A continuous large object space with a free-list to handle holes.
114class FreeListSpace : public LargeObjectSpace {
115 public:
116  virtual ~FreeListSpace();
117  static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);
118
119  size_t AllocationSize(const mirror::Object* obj)
120      EXCLUSIVE_LOCKS_REQUIRED(lock_);
121  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
122  size_t Free(Thread* self, mirror::Object* obj);
123  bool Contains(const mirror::Object* obj) const;
124  void Walk(DlMallocSpace::WalkCallback callback, void* arg) LOCKS_EXCLUDED(lock_);
125
126  // Address at which the space begins.
127  byte* Begin() const {
128    return begin_;
129  }
130
131  // Address at which the space ends, which may vary as the space is filled.
132  byte* End() const {
133    return end_;
134  }
135
136  // Current size of space
137  size_t Size() const {
138    return End() - Begin();
139  }
140
141  void Dump(std::ostream& os) const;
142
143 private:
144  static const size_t kAlignment = kPageSize;
145
146  class AllocationHeader {
147   public:
148    // Returns the allocation size, includes the header.
149    size_t AllocationSize() const {
150      return alloc_size_;
151    }
152
153    // Updates the allocation size in the header, the allocation size includes the header itself.
154    void SetAllocationSize(size_t size) {
155      DCHECK(IsAligned<kPageSize>(size));
156      alloc_size_ = size;
157    }
158
159    bool IsFree() const {
160      return AllocationSize() == 0;
161    }
162
163    // Returns the previous free allocation header by using the prev_free_ member to figure out
164    // where it is. If prev free is 0 then we just return ourself.
165    AllocationHeader* GetPrevFreeAllocationHeader() {
166      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) - prev_free_);
167    }
168
169    // Returns the address of the object associated with this allocation header.
170    mirror::Object* GetObjectAddress() {
171      return reinterpret_cast<mirror::Object*>(reinterpret_cast<uintptr_t>(this) + sizeof(*this));
172    }
173
174    // Returns the next allocation header after the object associated with this allocation header.
175    AllocationHeader* GetNextAllocationHeader() {
176      DCHECK_NE(alloc_size_, 0U);
177      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) + alloc_size_);
178    }
179
180    // Returns how many free bytes there is before the block.
181    size_t GetPrevFree() const {
182      return prev_free_;
183    }
184
185    // Update the size of the free block prior to the allocation.
186    void SetPrevFree(size_t prev_free) {
187      DCHECK(IsAligned<kPageSize>(prev_free));
188      prev_free_ = prev_free;
189    }
190
191    // Finds and returns the next non free allocation header after ourself.
192    // TODO: Optimize, currently O(n) for n free following pages.
193    AllocationHeader* GetNextNonFree();
194
195    // Used to implement best fit object allocation. Each allocation has an AllocationHeader which
196    // contains the size of the previous free block preceding it. Implemented in such a way that we
197    // can also find the iterator for any allocation header pointer.
198    class SortByPrevFree {
199     public:
200      bool operator()(const AllocationHeader* a, const AllocationHeader* b) const {
201        if (a->GetPrevFree() < b->GetPrevFree()) return true;
202        if (a->GetPrevFree() > b->GetPrevFree()) return false;
203        if (a->AllocationSize() < b->AllocationSize()) return true;
204        if (a->AllocationSize() > b->AllocationSize()) return false;
205        return reinterpret_cast<uintptr_t>(a) < reinterpret_cast<uintptr_t>(b);
206      }
207    };
208
209   private:
210    // Contains the size of the previous free block, if 0 then the memory preceding us is an
211    // allocation.
212    size_t prev_free_;
213
214    // Allocation size of this object, 0 means that the allocation header is free memory.
215    size_t alloc_size_;
216
217    friend class FreeListSpace;
218  };
219
220  FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);
221
222  // Removes header from the free blocks set by finding the corresponding iterator and erasing it.
223  void RemoveFreePrev(AllocationHeader* header) EXCLUSIVE_LOCKS_REQUIRED(lock_);
224
225  // Finds the allocation header corresponding to obj.
226  AllocationHeader* GetAllocationHeader(const mirror::Object* obj);
227
228  typedef std::set<AllocationHeader*, AllocationHeader::SortByPrevFree,
229                   accounting::GcAllocator<AllocationHeader*> > FreeBlocks;
230
231  byte* const begin_;
232  byte* const end_;
233
234  // There is not footer for any allocations at the end of the space, so we keep track of how much
235  // free space there is at the end manually.
236  UniquePtr<MemMap> mem_map_;
237  Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
238  size_t free_end_ GUARDED_BY(lock_);
239  FreeBlocks free_blocks_ GUARDED_BY(lock_);
240};
241
242}  // namespace space
243}  // namespace gc
244}  // namespace art
245
246#endif  // ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
247