Searched defs:bytes_allocated (Results 1 - 22 of 22) sorted by relevance

/art/runtime/gc/space/
bump_pointer_space-inl.h
27 inline mirror::Object* BumpPointerSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated, argument
33 *bytes_allocated = num_bytes;
43 size_t* bytes_allocated,
54 *bytes_allocated = num_bytes;
42 AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
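The bump_pointer_space hits above show the simplest contract for this out-parameter: a bump-pointer space grants exactly the requested size, so *bytes_allocated is just num_bytes. A minimal self-contained sketch of that pattern (class and member names are illustrative stand-ins, not ART's):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Toy bump-pointer allocator: Alloc() advances the end pointer atomically
    // and reports the granted size through *bytes_allocated, mirroring the
    // "*bytes_allocated = num_bytes" lines in bump_pointer_space-inl.h.
    class ToyBumpPointerSpace {
     public:
      ToyBumpPointerSpace(uint8_t* begin, size_t capacity)
          : end_(begin), limit_(begin + capacity) {}

      void* Alloc(size_t num_bytes, size_t* bytes_allocated) {
        uint8_t* old_end = end_.load(std::memory_order_relaxed);
        uint8_t* new_end;
        do {
          new_end = old_end + num_bytes;
          if (new_end > limit_) {
            return nullptr;  // Out of space; caller falls back to a slower path.
          }
        } while (!end_.compare_exchange_weak(old_end, new_end,
                                             std::memory_order_relaxed));
        *bytes_allocated = num_bytes;  // A bump pointer grants exactly the request.
        return old_end;
      }

     private:
      std::atomic<uint8_t*> end_;
      uint8_t* const limit_;
    };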
dlmalloc_space-inl.h
29 size_t* bytes_allocated,
35 obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,
56 size_t* bytes_allocated,
66 DCHECK(bytes_allocated != nullptr);
67 *bytes_allocated = allocation_size;
28 AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
54 AllocWithoutGrowthLocked( Thread* , size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
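The dlmalloc_space hits show the locked slow path: Alloc serializes on the space's lock, and AllocWithoutGrowthLocked reports the allocator's actual allocation size, which may exceed the request. A hedged sketch of that shape, using std::malloc and glibc/bionic's malloc_usable_size as stand-ins for ART's mspace calls:

    #include <cstddef>
    #include <cstdlib>
    #include <mutex>
    #include <malloc.h>  // malloc_usable_size (glibc/bionic extension).

    // Lock-guarded allocation: take the space's lock, ask the underlying
    // allocator for memory, and report the allocator's usable size through
    // *bytes_allocated rather than the requested size.
    class ToyDlMallocSpace {
     public:
      void* Alloc(size_t num_bytes, size_t* bytes_allocated) {
        std::lock_guard<std::mutex> guard(lock_);
        void* obj = std::malloc(num_bytes);
        if (obj != nullptr) {
          *bytes_allocated = malloc_usable_size(obj);  // May exceed num_bytes.
        }
        return obj;
      }
     private:
      std::mutex lock_;
    };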
rosalloc_space.h
49 mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated,
52 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
54 return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,
57 mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
60 return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size,
71 mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated, argument
74 return AllocCommon(self, num_bytes, bytes_allocated, usable_size,
78 size_t* bytes_allocated, size_t* usable_size,
81 return AllocCommon<false>(self, num_bytes, bytes_allocated, usable_size,
91 size_t* bytes_allocated);
77 AllocNonvirtualThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
[all...]
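The rosalloc_space.h hits illustrate the devirtualization idiom used throughout these spaces: the virtual Alloc/AllocThreadUnsafe overrides are one-line forwards to ALWAYS_INLINE AllocNonvirtual variants, so call sites that know the concrete space type skip the vtable dispatch. A stripped-down sketch of the idiom (all names illustrative, with malloc as a placeholder backing store):

    #include <cstddef>
    #include <cstdlib>

    class Space {
     public:
      virtual ~Space() = default;
      virtual void* Alloc(size_t num_bytes, size_t* bytes_allocated) = 0;
    };

    class ToyRosAllocSpace final : public Space {
     public:
      // Virtual entry point: forwards only, so the compiler can inline the
      // nonvirtual body at call sites holding a concrete ToyRosAllocSpace*.
      void* Alloc(size_t num_bytes, size_t* bytes_allocated) override {
        return AllocNonvirtual(num_bytes, bytes_allocated);
      }
      void* AllocNonvirtual(size_t num_bytes, size_t* bytes_allocated) {
        // The real code delegates to a templated AllocCommon.
        void* obj = std::malloc(num_bytes);
        if (obj != nullptr) {
          *bytes_allocated = num_bytes;
        }
        return obj;
      }
    };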
rosalloc_space-inl.h
65 size_t* bytes_allocated, size_t* usable_size,
82 DCHECK(bytes_allocated != nullptr);
83 *bytes_allocated = rosalloc_bytes_allocated;
99 size_t* bytes_allocated) {
100 DCHECK(bytes_allocated != nullptr);
102 rosalloc_->AllocFromThreadLocalRun(self, num_bytes, bytes_allocated));
64 AllocCommon(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
98 AllocThreadLocal(Thread* self, size_t num_bytes, size_t* bytes_allocated) argument
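The AllocThreadLocal hits show rosalloc's lock-free fast path: each thread holds a run carved into fixed-size slots of one bracket (size class), and *bytes_allocated reports the bracket size rather than the request, as in the rosalloc-inl.h hit at line 97 below. A toy version of a thread-local run, with a hypothetical layout (the real rosalloc tracks free slots per run rather than bumping):

    #include <cstddef>
    #include <cstdint>

    // Thread-local run: slots of one fixed bracket size, handed out without
    // taking any lock.
    struct ToyThreadLocalRun {
      uint8_t* next_slot;   // First free slot in this run.
      uint8_t* end;         // One past the last slot.
      size_t bracket_size;  // Fixed slot size (the size class).

      void* Alloc(size_t num_bytes, size_t* bytes_allocated) {
        if (num_bytes > bracket_size || next_slot == end) {
          return nullptr;  // Wrong bracket or exhausted run: take the slow path.
        }
        void* slot = next_slot;
        next_slot += bracket_size;
        *bytes_allocated = bracket_size;  // Accounting charges the slot size.
        return slot;
      }
    };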
large_object_space_test.cc
103 size_t bytes_allocated = 0, bytes_tl_bulk_allocated; local
105 mirror::Object* obj = los->Alloc(self, 100 * MB, &bytes_allocated, nullptr,
memory_tool_malloc_space-inl.h
32 size_t bytes_allocated, size_t usable_size,
37 *bytes_allocated_out = bytes_allocated;
63 // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
90 size_t bytes_allocated; local
94 &bytes_allocated, &usable_size,
102 bytes_allocated, usable_size,
119 size_t bytes_allocated; local
123 &bytes_allocated, &usable_size, &bytes_tl_bulk_allocated);
130 bytes_allocated, usable_size,
147 size_t bytes_allocated; local
31 AdjustForValgrind(void* obj_with_rdz, size_t num_bytes, size_t bytes_allocated, size_t usable_size, size_t bytes_tl_bulk_allocated, size_t* bytes_allocated_out, size_t* usable_size_out, size_t* bytes_tl_bulk_allocated_out) argument
[all...]
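The memory_tool_malloc_space hits show how redzone instrumentation interacts with this accounting: the wrapper asks the underlying space for num_bytes plus two redzones, reports the underlying bytes_allocated unchanged (hit at line 37), and hands the caller the interior pointer. A hedged sketch of that adjustment (kRedZoneBytes and the function name are hypothetical; the real AdjustForValgrind also threads bytes_tl_bulk_allocated through):

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRedZoneBytes = 8;  // Hypothetical redzone width.

    // Shift past the left redzone and pass the accounting numbers through:
    // *bytes_allocated_out keeps charging the full block, redzones included,
    // while the usable size excludes both redzones.
    inline void* AdjustForRedZones(void* obj_with_rdz, size_t bytes_allocated,
                                   size_t usable_size,
                                   size_t* bytes_allocated_out,
                                   size_t* usable_size_out) {
      *bytes_allocated_out = bytes_allocated;
      *usable_size_out = usable_size - 2 * kRedZoneBytes;
      return reinterpret_cast<uint8_t*>(obj_with_rdz) + kRedZoneBytes;
    }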
region_space-inl.h
26 inline mirror::Object* RegionSpace::Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated, argument
30 return AllocNonvirtual<false>(num_bytes, bytes_allocated, usable_size,
35 size_t* bytes_allocated,
39 return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
43 inline mirror::Object* RegionSpace::AllocNonvirtual(size_t num_bytes, size_t* bytes_allocated, argument
51 obj = current_region_->Alloc(num_bytes, bytes_allocated, usable_size,
55 obj = evac_region_->Alloc(num_bytes, bytes_allocated, usable_size,
64 obj = current_region_->Alloc(num_bytes, bytes_allocated, usable_size,
67 obj = evac_region_->Alloc(num_bytes, bytes_allocated, usable_size,
84 obj = r->Alloc(num_bytes, bytes_allocated, usable_size,
34 AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
114 Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
265 AllocLarge(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
[all...]
region_space.cc
288 void RegionSpace::FreeLarge(mirror::Object* large_obj, size_t bytes_allocated) { argument
293 uint8_t* end_addr = AlignUp(reinterpret_cast<uint8_t*>(large_obj) + bytes_allocated, kRegionSize);
space_test.h
71 size_t* bytes_allocated,
79 bytes_allocated,
91 size_t* bytes_allocated,
97 mirror::Object* obj = alloc_space->AllocWithGrowth(self, bytes, bytes_allocated, usable_size,
195 size_t bytes_allocated = 0; local
198 object.Assign(Alloc(space, self, alloc_size, &bytes_allocated, nullptr,
201 object.Assign(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr,
209 EXPECT_EQ(bytes_allocated, allocation_size);
293 size_t bytes_allocated = 0; local
296 large_object.Assign(Alloc(space, self, three_quarters_space, &bytes_allocated, nullptr,
[all...]
dlmalloc_space.cc
129 size_t* bytes_allocated, size_t* usable_size,
138 result = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,
266 size_t bytes_allocated = 0; local
267 mspace_inspect_all(mspace_, DlmallocBytesAllocatedCallback, &bytes_allocated);
268 return bytes_allocated;
128 AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
rosalloc_space.cc
155 size_t* bytes_allocated, size_t* usable_size,
164 result = AllocCommon(self, num_bytes, bytes_allocated, usable_size,
290 size_t bytes_allocated = 0; local
291 InspectAllRosAlloc(art::gc::allocator::RosAlloc::BytesAllocatedCallback, &bytes_allocated, false);
292 return bytes_allocated;
154 AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
large_object_space.cc
52 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
56 LargeObjectMapSpace::Alloc(self, num_bytes + kMemoryToolRedZoneBytes * 2, bytes_allocated,
134 size_t* bytes_allocated, size_t* usable_size,
157 DCHECK(bytes_allocated != nullptr);
163 *bytes_allocated = allocation_size;
479 mirror::Object* FreeListSpace::Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated, argument
513 DCHECK(bytes_allocated != nullptr);
514 *bytes_allocated = allocation_size;
133 Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
region_space.h
44 mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
48 mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
53 ALWAYS_INLINE mirror::Object* AllocNonvirtual(size_t num_bytes, size_t* bytes_allocated,
59 mirror::Object* AllocLarge(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size,
61 void FreeLarge(mirror::Object* large_obj, size_t bytes_allocated) REQUIRES(!region_lock_);
275 ALWAYS_INLINE mirror::Object* Alloc(size_t num_bytes, size_t* bytes_allocated,
394 size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); local
395 DCHECK_GE(bytes_allocated, 0U);
396 uint result = (live_bytes_ * 100U) / bytes_allocated;
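The region_space.h hits at lines 394-396 compute a region's live percentage for evacuation decisions: allocated bytes are rounded up to a whole region before the integer ratio is taken. A worked version of that arithmetic (kRegionSize's real value is defined in region_space.h; 256 KiB is only an assumed figure here, and the zero guard stands in for the DCHECK):

    #include <cstddef>

    constexpr size_t kRegionSize = 256 * 1024;  // Assumed; see region_space.h.

    inline size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    // Integer live percentage of a region, as in the hit at line 396:
    // (live_bytes_ * 100U) / bytes_allocated.
    inline unsigned LivePercent(size_t live_bytes, size_t raw_allocated) {
      size_t bytes_allocated = RoundUp(raw_allocated, kRegionSize);
      if (bytes_allocated == 0) {
        return 0;  // Nothing allocated yet; avoid dividing by zero.
      }
      return static_cast<unsigned>((live_bytes * 100U) / bytes_allocated);
    }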
/art/runtime/gc/allocator/
dlmalloc.cc
86 size_t* bytes_allocated = reinterpret_cast<size_t*>(arg); local
87 *bytes_allocated += used_bytes + sizeof(size_t);
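The dlmalloc.cc hit shows the callback shape dlmalloc's mspace_inspect_all expects: the running total travels through the opaque arg pointer, and line 87 charges each chunk's header word (sizeof(size_t)) on top of its used bytes. Reproduced as a standalone callback:

    #include <cstddef>

    // Handler for mspace_inspect_all(mspace, handler, arg): accumulate used
    // bytes plus one header word per chunk into the size_t behind arg.
    static void BytesAllocatedCallback(void* /*start*/, void* /*end*/,
                                       size_t used_bytes, void* arg) {
      size_t* bytes_allocated = reinterpret_cast<size_t*>(arg);
      *bytes_allocated += used_bytes + sizeof(size_t);
    }

    // Usage, assuming a valid mspace handle mspace_:
    //   size_t total = 0;
    //   mspace_inspect_all(mspace_, BytesAllocatedCallback, &total);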
rosalloc-inl.h
31 inline ALWAYS_INLINE void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated, argument
35 return AllocLargeObject(self, size, bytes_allocated, usable_size,
40 m = AllocFromRun(self, size, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
42 m = AllocFromRunThreadUnsafe(self, size, bytes_allocated, usable_size,
79 size_t* bytes_allocated) {
80 DCHECK(bytes_allocated != nullptr);
97 *bytes_allocated = bracket_size;
78 AllocFromThreadLocalRun(Thread* self, size_t size, size_t* bytes_allocated) argument
rosalloc.cc
466 void* RosAlloc::AllocLargeObject(Thread* self, size_t size, size_t* bytes_allocated, argument
468 DCHECK(bytes_allocated != nullptr);
484 *bytes_allocated = total_bytes;
638 void* RosAlloc::AllocFromRunThreadUnsafe(Thread* self, size_t size, size_t* bytes_allocated, argument
641 DCHECK(bytes_allocated != nullptr);
650 *bytes_allocated = bracket_size;
658 void* RosAlloc::AllocFromRun(Thread* self, size_t size, size_t* bytes_allocated, argument
660 DCHECK(bytes_allocated != nullptr);
741 *bytes_allocated = bracket_size;
754 *bytes_allocated = bracket_size;
1733 size_t* bytes_allocated = reinterpret_cast<size_t*>(arg); local
[all...]
/art/runtime/gc/
heap-inl.h
70 size_t bytes_allocated; local
88 bytes_allocated = byte_count;
89 usable_size = bytes_allocated;
93 (obj = rosalloc_space_->AllocThreadLocal(self, byte_count, &bytes_allocated)) &&
103 usable_size = bytes_allocated;
109 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
118 &bytes_allocated,
135 DCHECK_GT(bytes_allocated, 0u);
169 thread_stats->allocated_bytes += bytes_allocated;
172 global_stats->allocated_bytes += bytes_allocated;
236 TryToAllocate(Thread* self, AllocatorType allocator_type, size_t alloc_size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated) argument
[all...]
heap.cc
1688 size_t* bytes_allocated,
1710 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,
1725 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,
1746 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,
1755 mirror::Object* ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,
1774 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size,
1795 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,
1845 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,
2378 size_t bytes_allocated, dummy; local
2379 forward_address = to_space_->Alloc(self_, alloc_size, &bytes_allocated, nullptr,
1684 AllocateInternalWithGc(Thread* self, AllocatorType allocator, bool instrumented, size_t alloc_size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated, mirror::Class** klass) argument
3554 const uint64_t bytes_allocated = GetBytesAllocated(); local
[all...]
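The heap.cc hits trace AllocateInternalWithGc's retry ladder: each failed TryToAllocate is followed by a more aggressive collection, and only the final attempts (the TryToAllocate<true, true> instantiations) may grow the heap. A schematic outline under stand-in stubs, not ART's actual control flow:

    #include <cstddef>
    #include <cstdlib>

    // Stand-ins: malloc for the allocator, a no-op for the collector.
    static void* TryToAllocateStub(size_t alloc_size, size_t* bytes_allocated,
                                   bool /*grow*/) {
      void* ptr = std::malloc(alloc_size);
      if (ptr != nullptr) {
        *bytes_allocated = alloc_size;
      }
      return ptr;
    }
    static void RunGcStub(int /*aggressiveness*/) {}

    // Try without growing, collect harder after each failure, and only allow
    // the footprint to grow on the last attempt.
    static void* AllocateWithRetries(size_t alloc_size, size_t* bytes_allocated) {
      for (int level = 0; level < 3; ++level) {
        void* ptr = TryToAllocateStub(alloc_size, bytes_allocated, /*grow=*/false);
        if (ptr != nullptr) {
          return ptr;
        }
        RunGcStub(level);  // Roughly: sticky, then partial, then full.
      }
      return TryToAllocateStub(alloc_size, bytes_allocated, /*grow=*/true);
    }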
/art/runtime/gc/accounting/
mod_union_table_test.cc
53 size_t bytes_allocated = 0, bytes_tl_bulk_allocated; local
55 space->Alloc(self, size, &bytes_allocated, nullptr, &bytes_tl_bulk_allocated));
60 EXPECT_GE(bytes_allocated, size);
82 size_t bytes_allocated = 0, bytes_tl_bulk_allocated; local
83 auto* klass = down_cast<mirror::Class*>(space->Alloc(self, class_size, &bytes_allocated,
/art/runtime/base/
arena_allocator.cc
130 const size_t bytes_allocated = BytesAllocated(); local
131 os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
136 << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
/art/runtime/gc/collector/
semi_space.cc
510 size_t bytes_allocated, dummy; local
516 forward_address = promo_dest_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated,
520 forward_address = to_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated, nullptr,
525 bytes_promoted_ += bytes_allocated;
566 forward_address = to_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated, nullptr,
574 forward_address = fallback_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated,
583 bytes_moved_ += bytes_allocated;
concurrent_copying.cc
1792 size_t bytes_allocated = 0U; local
1796 bytes_allocated = region_space_bytes_allocated;
1810 bytes_allocated = region_space_alloc_size;
1823 bytes_allocated = non_moving_space_bytes_allocated;
1846 FillWithDummyObject(to_ref, bytes_allocated);
1849 if (bytes_allocated > space::RegionSpace::kRegionSize) {
1851 region_space_->FreeLarge(to_ref, bytes_allocated);
1854 heap_->num_bytes_allocated_.FetchAndAddSequentiallyConsistent(bytes_allocated);
1855 to_space_bytes_skipped_.FetchAndAddSequentiallyConsistent(bytes_allocated);
1858 skipped_blocks_map_.insert(std::make_pair(bytes_allocated,
[all...]
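The concurrent_copying.cc hits show the cleanup path when a to-space allocation turns out unusable: the block is filled with a dummy object, oversized region-space blocks go back through FreeLarge, and smaller ones are filed in skipped_blocks_map_ keyed by their size for later reuse. A size-keyed cache in that spirit (illustrative only; lookup semantics are an assumption):

    #include <cstddef>
    #include <cstdint>
    #include <map>

    // Blocks that could not hold the current object are remembered by byte
    // size; a later allocation takes the smallest cached block that fits.
    class SkippedBlocks {
     public:
      void Put(size_t bytes_allocated, uint8_t* block) {
        blocks_.insert(std::make_pair(bytes_allocated, block));
      }
      // Returns a block of at least num_bytes and reports its true size, or
      // nullptr if nothing cached is large enough.
      uint8_t* Take(size_t num_bytes, size_t* bytes_allocated) {
        auto it = blocks_.lower_bound(num_bytes);
        if (it == blocks_.end()) {
          return nullptr;
        }
        *bytes_allocated = it->first;
        uint8_t* block = it->second;
        blocks_.erase(it);
        return block;
      }
     private:
      std::multimap<size_t, uint8_t*> blocks_;
    };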

Completed in 250 milliseconds