Lines Matching refs:span

77     // If we're lucky, ll is non-empty, meaning it has a suitable span.
82 // Alternatively, maybe there's a usable returned span.
111 // find the best span (closest to n in size).
116   for (Span* span = large_.normal.next;
117        span != &large_.normal;
118        span = span->next) {
119     if (span->length >= n) {
121           || (span->length < best->length)
122           || ((span->length == best->length) && (span->start < best->start))) {
123         best = span;
130   for (Span* span = large_.returned.next;
131        span != &large_.returned;
132        span = span->next) {
133     if (span->length >= n) {
135           || (span->length < best->length)
136           || ((span->length == best->length) && (span->start < best->start))) {
137         best = span;
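
The two loops above implement address-ordered best-fit over the large normal and returned span lists: prefer the shortest span that still holds n pages, and break ties by lower start address (the `best == NULL` arm at line 120 is not shown, presumably because it does not mention "span"). A minimal, self-contained sketch of the same selection rule; Span here is a pared-down stand-in and BestFit is a hypothetical helper, not tcmalloc's API:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Minimal stand-ins for the Span fields the search uses.
    struct Span {
      uintptr_t start;   // first page covered by the span
      size_t length;     // number of pages
    };

    // Address-ordered best fit: among spans with length >= n, prefer the
    // shortest, breaking ties by lower start address.  Returns NULL if no
    // span is large enough.
    Span* BestFit(const std::vector<Span*>& spans, size_t n) {
      Span* best = NULL;
      for (size_t i = 0; i < spans.size(); i++) {
        Span* span = spans[i];
        if (span->length < n) continue;
        if (best == NULL
            || span->length < best->length
            || (span->length == best->length && span->start < best->start)) {
          best = span;
        }
      }
      return best;
    }
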
146 Span* PageHeap::Split(Span* span, Length n) {
148 ASSERT(n < span->length);
149 ASSERT(span->location == Span::IN_USE);
150 ASSERT(span->sizeclass == 0);
151 Event(span, 'T', n);
153 const int extra = span->length - n;
154 Span* leftover = NewSpan(span->start + n, extra);
158 pagemap_.set(span->start + n - 1, span); // Update map from pageid to span
159 span->length = n;
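
Split() shortens an in-use span to its first n pages and hands back the remainder as a new span; the pagemap entry for the shortened span's new last page is re-pointed at it so lookups stay correct. A rough sketch of that bookkeeping, with a plain std::map standing in for tcmalloc's pagemap_ radix tree:

    #include <cstddef>
    #include <cstdint>
    #include <map>

    typedef uintptr_t PageID;
    typedef size_t Length;

    struct Span {
      PageID start;
      Length length;
    };

    // std::map stands in for pagemap_ (page id -> Span* covering that page).
    typedef std::map<PageID, Span*> PageMap;

    // Shorten |span| to its first n pages and return the remainder as a new
    // span, keeping the page->span map correct for the boundary pages.
    // (Sketch only: the real Split() also asserts IN_USE and logs an event.)
    Span* Split(PageMap& pagemap, Span* span, Length n) {
      const Length extra = span->length - n;
      Span* leftover = new Span;
      leftover->start = span->start + n;
      leftover->length = extra;
      pagemap[leftover->start] = leftover;              // first page of the leftover
      pagemap[leftover->start + extra - 1] = leftover;  // last page of the leftover
      pagemap[span->start + n - 1] = span;              // new last page of |span|
      span->length = n;
      return leftover;
    }
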
164 void PageHeap::CommitSpan(Span* span) {
165 TCMalloc_SystemCommit(reinterpret_cast<void*>(span->start << kPageShift),
166 static_cast<size_t>(span->length << kPageShift));
167 stats_.committed_bytes += span->length << kPageShift;
170 void PageHeap::DecommitSpan(Span* span) {
171 TCMalloc_SystemRelease(reinterpret_cast<void*>(span->start << kPageShift),
172 static_cast<size_t>(span->length << kPageShift));
173 stats_.committed_bytes -= span->length << kPageShift;
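
CommitSpan()/DecommitSpan() use the same page-to-byte conversion throughout: a span's byte address is start << kPageShift and its byte size is length << kPageShift, and committed_bytes moves by that amount. A tiny worked example of the conversion, assuming an 8 KiB page (kPageShift = 13; the real shift is build-dependent):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static const int kPageShift = 13;   // assumed 8 KiB pages

    int main() {
      uintptr_t start = 1000;                                      // page id
      size_t length = 4;                                           // pages
      void* addr = reinterpret_cast<void*>(start << kPageShift);   // byte address of page 1000
      size_t bytes = static_cast<size_t>(length) << kPageShift;    // 4 * 8192 = 32768 bytes
      std::printf("span at %p covers %zu bytes\n", addr, bytes);
      return 0;
    }
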
176 Span* PageHeap::Carve(Span* span, Length n) {
178 ASSERT(span->location != Span::IN_USE);
179 const int old_location = span->location;
180 RemoveFromFreeList(span);
181 span->location = Span::IN_USE;
182 Event(span, 'A', n);
184 const int extra = span->length - n;
187 Span* leftover = NewSpan(span->start + n, extra);
192 // The previous span of |leftover| was just split -- no need to
193 // coalesce them. The next span of |leftover| was not previously coalesced
194 // with |span|, i.e. it is NULL or has a location other than |old_location|.
203 span->length = n;
204 pagemap_.set(span->start + n - 1, span);
209 CommitSpan(span);
211 ASSERT(span->location == Span::IN_USE);
212 ASSERT(span->length == n);
214 return span;
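
Carve() takes a free span, unlinks it, marks it IN_USE, and puts any leftover tail back on a free list of the same kind (normal or returned) it came from, committing the pages first when they came off the returned list. A condensed sketch of that control flow with simplified stand-in types; the FreeLists container and this Carve signature are illustrative, and the pagemap updates and CommitSpan() call are left out:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <list>

    typedef uintptr_t PageID;
    typedef size_t Length;

    enum Location { IN_USE, ON_NORMAL_FREELIST, ON_RETURNED_FREELIST };

    struct Span {
      PageID start;
      Length length;
      Location location;
    };

    // Illustrative stand-in for the per-size free lists: one committed
    // ("normal") list and one decommitted ("returned") list.
    struct FreeLists {
      std::list<Span*> normal;
      std::list<Span*> returned;
      void Prepend(Span* s) {
        (s->location == ON_NORMAL_FREELIST ? normal : returned).push_front(s);
      }
      void Remove(Span* s) {
        (s->location == ON_NORMAL_FREELIST ? normal : returned).remove(s);
      }
    };

    // Carve n pages out of a free span: unlink it, mark it in use, and put
    // the leftover tail back on the same kind of list it came from.
    Span* Carve(FreeLists* lists, Span* span, Length n) {
      assert(span->location != IN_USE && span->length >= n);
      const Location old_location = span->location;
      lists->Remove(span);
      span->location = IN_USE;

      const Length extra = span->length - n;
      if (extra > 0) {
        Span* leftover = new Span;
        leftover->start = span->start + n;
        leftover->length = extra;
        leftover->location = old_location;
        lists->Prepend(leftover);
        span->length = n;
      }
      return span;
    }
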
217 void PageHeap::Delete(Span* span) {
219 ASSERT(span->location == Span::IN_USE);
220 ASSERT(span->length > 0);
221 ASSERT(GetDescriptor(span->start) == span);
222 ASSERT(GetDescriptor(span->start + span->length - 1) == span);
223 const Length n = span->length;
224 span->sizeclass = 0;
225 span->sample = 0;
226 span->location = Span::ON_NORMAL_FREELIST;
227 Event(span, 'D', span->length);
228 MergeIntoFreeList(span); // Coalesces if possible
234 void PageHeap::MergeIntoFreeList(Span* span) {
235 ASSERT(span->location != Span::IN_USE);
242 // Note that the adjacent spans we merge into "span" may come out of a
243 // "normal" (committed) list, and cleanly merge with our IN_USE span, which
258 const PageID p = span->start;
259 const Length n = span->length;
262 // Merge preceding span into this span
266 // We're about to put the merge span into the returned freelist and call
267 // DecommitSpan() on it, which will mark the entire span including this
269 // merged span. To make the math work out we temporarily increase the
275 span->start -= len;
276 span->length += len;
277 pagemap_.set(span->start, span);
278 Event(span, 'L', len);
282 // Merge next span into this span
291 span->length += len;
292 pagemap_.set(span->start + span->length - 1, span);
293 Event(span, 'R', len);
296 Event(span, 'D', span->length);
297 span->location = Span::ON_RETURNED_FREELIST;
298 DecommitSpan(span);
299 PrependToFreeList(span);
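
MergeIntoFreeList() coalesces by looking up the pages immediately before and after the freed span: any free neighbor is absorbed by extending start/length and re-pointing the new boundary pagemap entries, after which the span is decommitted and prepended to the returned list. A compact sketch of the left/right coalescing step against a page-to-span map (std::map stands in for pagemap_; free-list unlinking, the committed-bytes adjustment discussed in the comments above, and the final decommit/prepend are omitted):

    #include <cstddef>
    #include <cstdint>
    #include <map>

    typedef uintptr_t PageID;
    typedef size_t Length;

    enum Location { IN_USE, ON_FREELIST };

    struct Span {
      PageID start;
      Length length;
      Location location;
    };

    typedef std::map<PageID, Span*> PageMap;   // stand-in for pagemap_

    // Span covering page |p|, or NULL if none is recorded.
    Span* Get(const PageMap& pm, PageID p) {
      PageMap::const_iterator it = pm.find(p);
      return it == pm.end() ? NULL : it->second;
    }

    // Absorb free neighbors on both sides of |span|, keeping the map entries
    // for its new first and last pages accurate (entries for pages that become
    // interior are left stale, as in tcmalloc).
    void MergeIntoFreeList(PageMap& pm, Span* span) {
      const PageID p = span->start;
      const Length n = span->length;

      Span* prev = Get(pm, p - 1);             // span ending just before ours
      if (prev != NULL && prev->location != IN_USE) {
        const Length len = prev->length;
        delete prev;                           // the real code unlinks it from its list first
        span->start -= len;
        span->length += len;
        pm[span->start] = span;
      }

      Span* next = Get(pm, p + n);             // span starting just after ours
      if (next != NULL && next->location != IN_USE) {
        const Length len = next->length;
        delete next;                           // likewise unlinked in the real code
        span->length += len;
        pm[span->start + span->length - 1] = span;
      }
      span->location = ON_FREELIST;
    }
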
302 void PageHeap::PrependToFreeList(Span* span) {
303 ASSERT(span->location != Span::IN_USE);
304 SpanList* list = (span->length < kMaxPages) ? &free_[span->length] : &large_;
305 if (span->location == Span::ON_NORMAL_FREELIST) {
306 stats_.free_bytes += (span->length << kPageShift);
307 DLL_Prepend(&list->normal, span);
309 stats_.unmapped_bytes += (span->length << kPageShift);
310 DLL_Prepend(&list->returned, span);
314 void PageHeap::RemoveFromFreeList(Span* span) {
315 ASSERT(span->location != Span::IN_USE);
316 if (span->location == Span::ON_NORMAL_FREELIST) {
317 stats_.free_bytes -= (span->length << kPageShift);
319 stats_.unmapped_bytes -= (span->length << kPageShift);
321 DLL_Remove(span);
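
PrependToFreeList()/RemoveFromFreeList() keep two byte counters in step with the lists: free_bytes tracks spans on the normal (committed) lists and unmapped_bytes tracks spans on the returned (decommitted) lists, each moving by length << kPageShift as spans enter and leave. A small sketch of that accounting; the Stats struct and Account() helper are illustrative, not tcmalloc's API:

    #include <cstddef>
    #include <cstdint>

    static const int kPageShift = 13;   // assumed page size, as above

    struct Stats {
      uint64_t free_bytes;       // spans on the normal (committed) free lists
      uint64_t unmapped_bytes;   // spans on the returned (decommitted) free lists
    };

    // Adjust the right counter when a span of |pages| enters or leaves a list.
    void Account(Stats* stats, size_t pages, bool on_normal_list, bool entering) {
      const uint64_t bytes = static_cast<uint64_t>(pages) << kPageShift;
      uint64_t* counter = on_normal_list ? &stats->free_bytes : &stats->unmapped_bytes;
      if (entering) {
        *counter += bytes;
      } else {
        *counter -= bytes;
      }
    }
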
372 // span in each list. Stop after releasing at least num_pages.
394 void PageHeap::RegisterSizeClass(Span* span, size_t sc) {
395 // Associate span object with all interior pages as well
396 ASSERT(span->location == Span::IN_USE);
397 ASSERT(GetDescriptor(span->start) == span);
398 ASSERT(GetDescriptor(span->start+span->length-1) == span);
399 Event(span, 'C', sc);
400 span->sizeclass = sc;
401 for (Length i = 1; i < span->length-1; i++) {
402 pagemap_.set(span->start+i, span);
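
RegisterSizeClass() stores the size class on the span and points every interior page of the pagemap at it (the first and last pages were already recorded when the span was created), so a pointer anywhere inside the span maps back to its descriptor. A minimal sketch of the interior fill, again with std::map standing in for pagemap_:

    #include <cstddef>
    #include <cstdint>
    #include <map>

    typedef uintptr_t PageID;
    typedef size_t Length;

    struct Span {
      PageID start;
      Length length;
      size_t sizeclass;
    };

    typedef std::map<PageID, Span*> PageMap;   // stand-in for pagemap_

    // Record the size class and make every interior page resolve to |span|;
    // the first and last pages are assumed to have been set at creation time.
    void RegisterSizeClass(PageMap& pm, Span* span, size_t sc) {
      span->sizeclass = sc;
      for (Length i = 1; i < span->length - 1; i++) {
        pm[span->start + i] = span;
      }
    }
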
428 Span* span = reinterpret_cast<Span*>(pagemap_.Next(start));
429 if (span == NULL) {
432 r->address = span->start << kPageShift;
433 r->length = span->length << kPageShift;
435 switch (span->location) {
439 if (span->sizeclass > 0) {
440 // Only some of the objects in this span may be in use.
441 const size_t osize = Static::sizemap()->class_to_size(span->sizeclass);
442 r->fraction = (1.0 * osize * span->refcount) / r->length;
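
The fraction at line 442 estimates how much of a small-object span is live: object size times the number of live objects, divided by the span's length in bytes. A quick worked example with assumed numbers:

    #include <cstdio>

    int main() {
      // Assumed numbers: one 8 KiB span carved into 64-byte objects,
      // 32 of which are currently allocated (refcount == 32).
      const double osize = 64.0;      // class_to_size(span->sizeclass)
      const double refcount = 32.0;   // live objects in the span
      const double length = 8192.0;   // r->length in bytes
      std::printf("in-use fraction = %.2f\n", (osize * refcount) / length);   // prints 0.25
      return 0;
    }
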
504 Span* span = NewSpan(p, ask);
505 RecordSpan(span);
506 Delete(span);
512 // TODO: Once we can return memory to the system, return the new span