Lines matching defs:heap

10 #include "src/heap/mark-compact.h"
43 DCHECK(owner == page->heap()->old_pointer_space() ||
44 owner == page->heap()->old_data_space() ||
45 owner == page->heap()->map_space() ||
46 owner == page->heap()->cell_space() ||
47 owner == page->heap()->property_cell_space() ||
48 owner == page->heap()->code_space());
398 NewSpacePage* NewSpacePage::Initialize(Heap* heap, Address start,
404 MemoryChunk::Initialize(heap, start, Page::kPageSize, area_start,
415 heap->incremental_marking()->SetNewSpacePageFlags(page);
430 MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
437 chunk->heap_ = heap;
467 if (owner == heap->old_data_space()) {
494 if (!heap()->isolate()->memory_allocator()->CommitMemory(start, length,
555 Heap* heap = isolate_->heap();
665 heap, base, chunk_size, area_start, area_end, executable, owner);
686 return Page::Initialize(isolate_->heap(), chunk, executable, owner);
696 return LargePage::Initialize(isolate_->heap(), chunk);
708 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk),
880 PagedSpace::PagedSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id,
882 : Space(heap, id, executable),
888 area_size_ = heap->isolate()->memory_allocator()->CodePageAreaSize();
912 heap()->isolate()->memory_allocator()->Free(iterator.next());
934 DCHECK(!heap()->mark_compact_collector()->in_use());
974 Page* p = heap()->isolate()->memory_allocator()->AllocatePage(size, this,
1008 CodeRange* code_range = heap()->isolate()->code_range();
1074 heap()->decrement_scan_on_scavenge_pages();
1087 heap()->isolate()->memory_allocator()->Free(page);
1089 heap()->QueueMemoryChunkForFree(page);
1098 emergency_memory_ = heap()->isolate()->memory_allocator()->AllocateChunk(
1108 heap()->isolate()->memory_allocator()->Free(page);
1114 Page* page = Page::Initialize(heap(), emergency_memory_, executable(), this);
1147 CHECK(heap()->map_space()->Contains(map));
1155 // All the interior pointers should be contained in the heap.
1181 int initial_semispace_capacity = heap()->InitialSemiSpaceSize();
1184 Address base = heap()->isolate()->memory_allocator()->ReserveAlignedMemory(
1190 LOG(heap()->isolate(), NewEvent("InitialChunk", chunk_base_, chunk_size_));
1205 DCHECK(reserved_semispace_capacity == heap()->ReservedSemiSpaceSize());
1207 2 * heap()->ReservedSemiSpaceSize());
1247 LOG(heap()->isolate(), DeleteEvent("InitialChunk", chunk_base_));
1250 heap()->isolate()->memory_allocator()->FreeMemory(&reservation_,
1324 if (heap()->inline_allocation_disabled()) {
1361 if (heap()->gc_state() == Heap::SCAVENGE) {
1362 heap()->promotion_queue()->SetNewLimit(limit);
1366 heap()->CreateFillerObjectAt(top, remaining_in_page);
1383 heap()->incremental_marking()->Step(bytes_allocated,
1391 heap()->incremental_marking()->Step(bytes_allocated,
1425 CHECK(heap()->map_space()->Contains(map));
1434 // All the interior pointers should be contained in the heap.
1491 if (!heap()->isolate()->memory_allocator()->CommitBlock(
1499 NewSpacePage::Initialize(heap(), start_ + i * Page::kPageSize, this);
1514 if (!heap()->isolate()->memory_allocator()->UncommitBlock(start,
1550 if (!heap()->isolate()->memory_allocator()->CommitBlock(
1560 NewSpacePage::Initialize(heap(), page_address, this);
1580 MemoryAllocator* allocator = heap()->isolate()->memory_allocator();
1695 if (page->heap()->incremental_marking()->IsMarking()) {
1850 // Support for statistics gathering for --heap-stats and --log-gc.
1918 Isolate* isolate = heap()->isolate();
1955 void FreeListNode::set_size(Heap* heap, int size_in_bytes) {
1973 synchronized_set_map_no_write_barrier(heap->raw_unchecked_free_space_map());
1975 set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map());
1977 set_map_no_write_barrier(heap->raw_unchecked_two_pointer_filler_map());
2140 void FreeListCategory::RepairFreeList(Heap* heap) {
2145 *map_location = heap->free_space_map();
2147 DCHECK(*map_location == heap->free_space_map());
2154 FreeList::FreeList(PagedSpace* owner) : owner_(owner), heap_(owner->heap()) {
2335 // skipped when scanning the heap. This also puts it back in the free list
2339 owner_->heap()->incremental_marking()->OldSpaceStep(size_in_bytes -
2370 if (owner_->heap()->inline_allocation_disabled()) {
2376 owner_->heap()->incremental_marking()->IsMarkingIncomplete() &&
2426 void FreeList::RepairLists(Heap* heap) {
2427 small_list_.RepairFreeList(heap);
2428 medium_list_.RepairFreeList(heap);
2429 large_list_.RepairFreeList(heap);
2430 huge_list_.RepairFreeList(heap);
2503 DCHECK(heap()->mark_compact_collector()->sweeping_in_progress() ||
2510 // on the heap. If there was already a free list then the elements on it
2513 void PagedSpace::RepairFreeListsAfterBoot() { free_list_.RepairLists(heap()); }
2524 heap()->CreateFillerObjectAt(allocation_info_.top(), remaining);
2534 MarkCompactCollector* collector = heap()->mark_compact_collector();
2550 MarkCompactCollector* collector = heap()->mark_compact_collector();
2576 if (!heap()->always_allocate() &&
2577 heap()->OldGenerationAllocationLimitReached()) {
2696 Isolate* isolate = heap()->isolate();
2732 if (heap()->mark_compact_collector()->sweeping_in_progress()) {
2733 heap()->mark_compact_collector()->EnsureSweepingCompleted();
2735 ClearHistograms(heap()->isolate());
2739 ReportHistogram(heap()->isolate(), true);
2797 LargeObjectSpace::LargeObjectSpace(Heap* heap, intptr_t max_capacity,
2799 : Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis
2823 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address()));
2826 heap()->isolate()->memory_allocator()->PerformAllocationCallback(
2828 heap()->isolate()->memory_allocator()->Free(page);
2838 if (!heap()->always_allocate() &&
2839 heap()->OldGenerationAllocationLimitReached()) {
2847 LargePage* page = heap()->isolate()->memory_allocator()->AllocateLargePage(
2878 // Make the object consistent so the heap can be verified in OldSpaceStep.
2881 heap()->fixed_array_map();
2885 heap()->incremental_marking()->OldSpaceStep(object_size);
2954 heap()->mark_compact_collector()->ReportDeleteIfNeeded(object,
2955 heap()->isolate());
2972 heap()->QueueMemoryChunkForFree(page);
2974 heap()->isolate()->memory_allocator()->Free(page);
2978 heap()->FreeQueuedChunks();
3010 CHECK(heap()->map_space()->Contains(map));
3034 CHECK(heap()->Contains(element_object));
3057 ClearHistograms(heap()->isolate());
3068 if (num_objects > 0) ReportHistogram(heap()->isolate(), false);
3073 Isolate* isolate = heap()->isolate();
3089 HeapObjectIterator objects(this, heap()->GcSafeSizeOfOldObjectFunction());
3096 mark_size += heap()->GcSafeSizeOfOldObjectFunction()(object);