/art/runtime/gc/space/
bump_pointer_space-inl.h
  28  Alloc(Thread*, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  29  size_t* usable_size,
  35  if (usable_size != nullptr) {
  36  *usable_size = num_bytes;
  43  AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  45  size_t* usable_size,
  59  if (UNLIKELY(usable_size != nullptr)) {
  60  *usable_size = num_bytes;

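Both hits show the bump-pointer contract: the space hands back exactly the requested bytes, so usable_size, when the caller asks for it at all, is just num_bytes. A minimal standalone sketch of that pattern, with hypothetical names (BumpArena) and a simple overshoot check in place of ART's CAS loop:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct BumpArena {
      std::atomic<uint8_t*> pos;  // Next free byte.
      uint8_t* end;               // One past the arena.

      void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
        uint8_t* old_pos = pos.fetch_add(num_bytes, std::memory_order_relaxed);
        if (old_pos + num_bytes > end) {
          return nullptr;  // Exhausted; real code would CAS rather than overshoot.
        }
        *bytes_allocated = num_bytes;
        if (usable_size != nullptr) {  // Callers may pass nullptr, hence the guard.
          *usable_size = num_bytes;    // Lines 36/60: bump allocation has no slack.
        }
        return old_pos;
      }
    };
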
dlmalloc_space-inl.h
  28  AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  30  size_t* usable_size,
  35  obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,
  45  inline size_t DlMallocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {  [argument]
  48  if (usable_size != nullptr) {
  49  *usable_size = size;
  54  AllocWithoutGrowthLocked(Thread*, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  57  size_t* usable_size,
  65  size_t allocation_size = AllocationSizeNonvirtual(result, usable_size);

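Here AllocationSizeNonvirtual asks the underlying dlmalloc heap how big the block really is, then reports it through usable_size. A sketch of that "ask the allocator, then report" shape, using glibc's malloc_usable_size() as a stand-in for dlmalloc's equivalent query (not ART's exact accounting, which also handles chunk overhead):

    #include <cstddef>
    #include <malloc.h>  // malloc_usable_size() is a glibc/Bionic extension.

    size_t AllocationSize(void* obj, size_t* usable_size) {
      size_t size = malloc_usable_size(obj);  // Block is often larger than the request.
      if (usable_size != nullptr) {
        *usable_size = size;  // Lines 48-49: report the real capacity to the caller.
      }
      return size;
    }
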
rosalloc_space-inl.h
  32  AllocCommon(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  33  size_t* bytes_allocated, size_t* usable_size,
  53  if (usable_size != nullptr) {
  54  *usable_size = rosalloc_usable_size;

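The nullptr guard recurs in nearly every entry because usable_size is an optional out-parameter at the space layer. A caller-side sketch, with a hypothetical stand-in interface, of why the guard is needed:

    #include <cstddef>

    struct AllocSpace {  // Stand-in for ART's space interface.
      virtual void* Alloc(size_t num_bytes, size_t* bytes_allocated,
                          size_t* usable_size) = 0;
      virtual ~AllocSpace() = default;
    };

    void* AllocIgnoringUsableSize(AllocSpace* space, size_t n) {
      size_t bytes_allocated;
      // Passing nullptr is allowed; the guard at lines 53-54 skips the store.
      return space->Alloc(n, &bytes_allocated, /*usable_size=*/nullptr);
    }
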
rosalloc_space.h
  50  size_t* usable_size, size_t* bytes_tl_bulk_allocated)
  53  size_t* usable_size, size_t* bytes_tl_bulk_allocated) OVERRIDE {
  54  return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size,
  58  size_t* usable_size, size_t* bytes_tl_bulk_allocated)
  60  return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size,
  63  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
  64  return AllocationSizeNonvirtual<true>(obj, usable_size);
  71  AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  72  size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
  74  return AllocCommon(self, num_bytes, bytes_allocated, usable_size,
  77  AllocNonvirtualThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  78  size_t* bytes_allocated, size_t* usable_size,
  [all...]

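The OVERRIDE methods here are thin shims over inline *Nonvirtual forms: virtual dispatch costs a vtable lookup, so hot paths that know the concrete space call the nonvirtual form directly and let the compiler inline it. A sketch of that forwarding pattern with hypothetical simplified types:

    #include <cstddef>
    #include <new>

    struct Space {  // Stand-in base class.
      virtual void* Alloc(size_t n, size_t* usable_size) = 0;
      virtual ~Space() = default;
    };

    struct RosSpace : Space {  // Hypothetical concrete space.
      // Inlinable fast path, mirroring AllocNonvirtual at line 71.
      void* AllocNonvirtual(size_t n, size_t* usable_size) {
        if (usable_size != nullptr) *usable_size = n;
        return ::operator new(n);
      }
      // Thin virtual shim, mirroring the OVERRIDE at lines 53-54.
      void* Alloc(size_t n, size_t* usable_size) override {
        return AllocNonvirtual(n, usable_size);
      }
    };
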
memory_tool_malloc_space-inl.h
  33  AdjustForValgrind(void* obj_with_rdz, size_t num_bytes, size_t bytes_allocated, size_t usable_size, size_t bytes_tl_bulk_allocated, size_t* bytes_allocated_out, size_t* usable_size_out, size_t* bytes_tl_bulk_allocated_out)  [argument]
  34  size_t bytes_allocated, size_t usable_size,
  51  *usable_size_out = usable_size - 2 * kMemoryToolRedZoneBytes;
  65  // Right redzone. Assumes that if bytes_allocated > usable_size, then the difference is
  67  // At the moment, this fits RosAlloc (no management data in a slot, usable_size == alloc_size)
  68  // and DlMalloc (allocation_size = (usable_size == num_bytes) + 4, 4 is management)
  70  usable_size - (num_bytes + kMemoryToolRedZoneBytes));
  93  size_t usable_size;  [local]
  96  &bytes_allocated, &usable_size,
  104  bytes_allocated, usable_size,
  122  size_t usable_size;  [local]
  150  size_t usable_size;  [local]
  175  AllocationSize(mirror::Object* obj, size_t* usable_size)  [argument]
  203  size_t usable_size;  [local]
  [all...]

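The tool space over-allocates, hides a redzone on each side of the object, and shrinks the reported usable size by both redzones (line 51). A simplified sketch of that bookkeeping, with an illustrative redzone width and a hypothetical function name:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kMemoryToolRedZoneBytes = 8;  // Illustrative value.

    void* AdjustForRedzones(void* obj_with_rdz, size_t usable_size,
                            size_t* usable_size_out) {
      // Line 51: both redzones come out of what the caller may touch.
      *usable_size_out = usable_size - 2 * kMemoryToolRedZoneBytes;
      // The object the caller sees starts just past the left redzone.
      return reinterpret_cast<uint8_t*>(obj_with_rdz) + kMemoryToolRedZoneBytes;
    }
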
space_create_test.cc
  315  size_t allocation_size, usable_size, bytes_tl_bulk_allocated;  [local]
  321  &usable_size,
  326  EXPECT_EQ(usable_size, computed_usable_size);
  336  size_t allocation_size, usable_size, bytes_tl_bulk_allocated;  [local]
  341  &usable_size,
  346  EXPECT_EQ(usable_size, computed_usable_size);

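The EXPECT_EQ at lines 326/346 pins down the contract all of these spaces share: the usable_size reported by Alloc must equal what AllocationSize later computes for the same object. A self-contained analogue of that round-trip check, using glibc malloc/malloc_usable_size as hypothetical stand-ins for an ART space:

    #include <cassert>
    #include <cstddef>
    #include <cstdlib>
    #include <malloc.h>  // malloc_usable_size() (glibc/Bionic extension).

    int main() {
      void* obj = std::malloc(300);
      if (obj == nullptr) return 1;
      size_t usable_size = malloc_usable_size(obj);           // As reported "at Alloc".
      size_t computed_usable_size = malloc_usable_size(obj);  // As recomputed later.
      assert(usable_size >= 300);                   // At least the request.
      assert(usable_size == computed_usable_size);  // The round-trip invariant.
      std::free(obj);
      return 0;
    }
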
bump_pointer_space.cc
  222  size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {  [argument]
  224  if (usable_size != nullptr) {
  225  *usable_size = RoundUp(num_bytes, kAlignment);

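Line 225 reports usable_size by rounding the object's size up to the space's alignment; RoundUp is ART's bit-twiddling helper. A minimal equivalent for power-of-two alignments:

    #include <cstddef>

    constexpr size_t RoundUp(size_t x, size_t n) {  // n must be a power of two.
      return (x + n - 1) & ~(n - 1);
    }

    static_assert(RoundUp(13, 8) == 16, "13 rounds up to 16 at 8-byte alignment");
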
region_space-inl.h
  27  Alloc(Thread* self ATTRIBUTE_UNUSED, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  30  /* out */ size_t* usable_size,
  33  return AllocNonvirtual<false>(num_bytes, bytes_allocated, usable_size,
  37  AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  40  /* out */ size_t* usable_size,
  43  return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
  47  AllocNonvirtual(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  49  /* out */ size_t* usable_size,
  57  usable_size,
  66  usable_size,
  73  obj = r->Alloc(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
  86  obj = AllocLarge<kForEvac>(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
  94  Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  96  /* out */ size_t* usable_size,
  244  AllocLarge(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  [all...]

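Lines 73/86 show the region space's size dispatch: a request that fits in one region takes the per-region bump path, while an oversized one goes to AllocLarge, which claims whole contiguous regions. A hypothetical simplified skeleton (ART's regions are 256 KB, but treat the constant as illustrative):

    #include <cstddef>
    #include <cstdlib>

    constexpr size_t kRegionSize = 256 * 1024;

    // Stub standing in for bump allocation inside the current region.
    void* AllocInRegion(size_t n, size_t* usable_size) {
      if (usable_size != nullptr) *usable_size = n;
      return std::malloc(n);
    }

    // Stub standing in for the multi-region path: whole regions are consumed.
    void* AllocLarge(size_t n, size_t* usable_size) {
      size_t rounded = (n + kRegionSize - 1) / kRegionSize * kRegionSize;
      if (usable_size != nullptr) *usable_size = rounded;
      return std::malloc(rounded);
    }

    void* RegionAlloc(size_t num_bytes, size_t* usable_size) {
      return num_bytes <= kRegionSize ? AllocInRegion(num_bytes, usable_size)
                                      : AllocLarge(num_bytes, usable_size);
    }
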
dlmalloc_space.cc
  130  AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  131  size_t* bytes_allocated, size_t* usable_size,
  140  result = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size,

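AllocWithGrowth (here and in rosalloc_space.cc below) temporarily lifts the footprint cap, allocates, then restores the cap so ordinary allocations still respect it. A hypothetical simplified version with a plain size_t cap standing in for the mspace footprint limit:

    #include <cstddef>
    #include <cstdlib>

    struct GrowableSpace {
      size_t footprint_limit;  // Cap honored by ordinary allocations.
      size_t max_capacity;     // Hard ceiling of the space.

      // Stand-in for the capped path (AllocWithoutGrowthLocked at line 140).
      void* AllocWithoutGrowth(size_t n) {
        return n <= footprint_limit ? std::malloc(n) : nullptr;
      }

      // Mirrors lines 130-140: lift the cap, allocate, restore the cap.
      void* AllocWithGrowth(size_t num_bytes) {
        size_t old_limit = footprint_limit;
        footprint_limit = max_capacity;
        void* result = AllocWithoutGrowth(num_bytes);
        footprint_limit = old_limit;  // Ordinary allocs keep the old cap.
        return result;
      }
    };
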
rosalloc_space.cc
  156  AllocWithGrowth(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  157  size_t* bytes_allocated, size_t* usable_size,
  166  result = AllocCommon(self, num_bytes, bytes_allocated, usable_size,
  379  size_t RosAllocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {  [argument]
  405  if (usable_size != nullptr) {
  406  *usable_size = size_by_size;

large_object_space.cc
  57  size_t* usable_size, size_t* bytes_tl_bulk_allocated)
  61  usable_size, bytes_tl_bulk_allocated);
  68  if (usable_size != nullptr) {
  69  *usable_size = num_bytes;  // Since we have redzones, shrink the usable size.
  74  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE {
  75  return LargeObjectMapSpace::AllocationSize(ObjectWithRedzone(obj), usable_size);
  137  Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  138  size_t* bytes_allocated, size_t* usable_size,
  159  if (usable_size != nullptr) {
  160  *usable_size = allocation_size;
  204  size_t LargeObjectMapSpace::AllocationSize(mirror::Object* obj, size_t* usable_size) {  [argument]
  463  AllocationSize(mirror::Object* obj, size_t* usable_size)  [argument]
  474  Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  [all...]

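Unlike the bump and run allocators, a map-based large object space records each allocation's size at Alloc time and looks it up by object address when AllocationSize is called (line 204). A hypothetical simplified version of that bookkeeping:

    #include <cstddef>
    #include <map>

    class LargeObjectMap {
     public:
      void Record(void* obj, size_t allocation_size) {
        sizes_[obj] = allocation_size;  // Done by Alloc.
      }
      size_t AllocationSize(void* obj, size_t* usable_size) const {
        size_t size = sizes_.at(obj);  // Alloc must have recorded it.
        if (usable_size != nullptr) {
          *usable_size = size;         // Lines 159-160.
        }
        return size;
      }
     private:
      std::map<void*, size_t> sizes_;
    };
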
region_space.cc
  577  size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {  [argument]
  579  if (usable_size != nullptr) {
  582  *usable_size = RoundUp(num_bytes, kAlignment);
  585  *usable_size = RoundUp(num_bytes, kRegionSize);

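Lines 582/585 are the two rounding cases: a small object's usable size rounds up to the object alignment, while a large object consumes whole regions. A sketch with a hypothetical is_large flag standing in for the region-type check, and illustrative constants:

    #include <cstddef>

    constexpr size_t kAlignment = 8;            // Illustrative object alignment.
    constexpr size_t kRegionSize = 256 * 1024;  // Illustrative region granularity.

    constexpr size_t RoundUp(size_t x, size_t n) {  // n: power of two.
      return (x + n - 1) & ~(n - 1);
    }

    size_t UsableSize(size_t num_bytes, bool is_large) {
      return is_large ? RoundUp(num_bytes, kRegionSize)  // Whole regions.
                      : RoundUp(num_bytes, kAlignment);  // Alignment padding only.
    }
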
/art/runtime/gc/allocator/
rosalloc-inl.h
  31  Alloc(Thread* self, size_t size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  32  size_t* usable_size,
  35  return AllocLargeObject(self, size, bytes_allocated, usable_size,
  40  m = AllocFromRun(self, size, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
  42  m = AllocFromRunThreadUnsafe(self, size, bytes_allocated, usable_size,
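RosAlloc's entry point dispatches on size: requests above the large-object threshold bypass the runs entirely (line 35); smaller ones go to a size-bracketed run (lines 40/42). A hypothetical skeleton with stub paths and an illustrative cutoff:

    #include <cstddef>
    #include <cstdlib>

    constexpr size_t kLargeSizeThreshold = 2048;  // Illustrative cutoff.

    void* AllocLargeObject(size_t size) { return std::malloc(size); }  // Stub.
    void* AllocFromRun(size_t size)     { return std::malloc(size); }  // Stub.

    void* RosAlloc(size_t size) {
      if (size > kLargeSizeThreshold) {
        return AllocLargeObject(size);  // Page-granularity path.
      }
      return AllocFromRun(size);        // Size-bracketed run path.
    }
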
rosalloc.cc
  460  AllocLargeObject(Thread* self, size_t size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  461  size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
  463  DCHECK(usable_size != nullptr);
  479  *usable_size = total_bytes;
  632  AllocFromRunThreadUnsafe(Thread* self, size_t size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  633  size_t* usable_size,
  636  DCHECK(usable_size != nullptr);
  645  *usable_size = bracket_size;
  652  AllocFromRun(Thread* self, size_t size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  653  size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
  655  DCHECK(usable_size != nullptr);
  736  *usable_size = bracket_size;
  749  *usable_size
  [all...]

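Note the contrast with the space layer: the DCHECKs at lines 463/636/655 show that inside RosAlloc usable_size must be non-null. Lines 645/736 report bracket_size rather than the request because a run hands out a fixed-size slot from the request's size bracket, so the whole slot is usable. A sketch with a hypothetical power-of-two bracket scheme (ART's real bracket table is finer-grained):

    #include <cstddef>

    size_t BracketSize(size_t size) {
      size_t bracket = 16;                  // Smallest slot.
      while (bracket < size) bracket *= 2;  // Round up to the next bracket.
      return bracket;
    }

    void ReportRunAlloc(size_t size, size_t* bytes_allocated, size_t* usable_size) {
      size_t bracket_size = BracketSize(size);
      *bytes_allocated = bracket_size;
      *usable_size = bracket_size;  // The caller may use the whole slot;
                                    // per the DCHECKs, never nullptr here.
    }
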
/art/runtime/gc/
heap-inl.h
  80  size_t usable_size;  [local]
  94  usable_size = bytes_allocated;
  95  pre_fence_visitor(obj, usable_size);
  106  usable_size = bytes_allocated;
  107  pre_fence_visitor(obj, usable_size);
  113  &usable_size, &bytes_tl_bulk_allocated);
  122  &usable_size,
  139  DCHECK_GT(usable_size, 0u);
  156  pre_fence_visitor(obj, usable_size);
  168  CHECK_LE(obj->SizeOf(), usable_size);
  248  TryToAllocate(Thread* self, AllocatorType allocator_type, size_t alloc_size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  [all...]

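Lines 94/106 are the TLAB fast path: a thread-local bump allocation needs no size lookup, so usable_size is simply the bytes just allocated. A sketch of that per-thread state with hypothetical names:

    #include <cstddef>
    #include <cstdint>

    struct Tlab {
      uint8_t* pos;  // Next free byte in this thread's buffer.
      uint8_t* end;  // End of the buffer.

      void* Alloc(size_t alloc_size, size_t* bytes_allocated, size_t* usable_size) {
        if (pos + alloc_size > end) {
          return nullptr;  // Slow path: refill the TLAB or fall back to the heap.
        }
        void* obj = pos;
        pos += alloc_size;
        *bytes_allocated = alloc_size;
        *usable_size = alloc_size;  // Line 94: no per-object metadata to consult.
        return obj;
      }
    };
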
heap.cc
  1571  AllocateInternalWithGc(Thread* self, AllocatorType allocator, bool instrumented, size_t alloc_size, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated, ObjPtr<mirror::Class>* klass)  [argument]
  1576  size_t* usable_size,
  1597  usable_size, bytes_tl_bulk_allocated);
  1612  usable_size, bytes_tl_bulk_allocated);
  1633  usable_size, bytes_tl_bulk_allocated);
  1642  usable_size, bytes_tl_bulk_allocated);
  1660  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size,
  1682  usable_size, bytes_tl_bulk_allocated);
  1736  usable_size, bytes_tl_bulk_allocated);
  4020  AllocWithNewTLAB(Thread* self, size_t alloc_size, bool grow, size_t* bytes_allocated, size_t* usable_size, size_t* bytes_tl_bulk_allocated)  [argument]
  4024  size_t* usable_size,
  4069  usable_size,
  [all...]
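The repeated `usable_size, bytes_tl_bulk_allocated);` hits in AllocateInternalWithGc are retry sites: the heap re-attempts the allocation after progressively heavier collections before giving up. A hypothetical simplified escalation loop (ART also clears soft references and may try homogeneous-space compaction; the GC hooks here are stubs):

    #include <cstddef>
    #include <cstdlib>
    #include <initializer_list>

    enum class GcType { kSticky, kPartial, kFull };  // Lightest to heaviest.

    // Stand-in for the fast allocation paths (TryToAllocate at line 1660).
    void* TryToAllocate(size_t alloc_size, size_t* bytes_allocated,
                        size_t* usable_size) {
      void* p = std::malloc(alloc_size);
      if (p != nullptr) {
        *bytes_allocated = alloc_size;
        *usable_size = alloc_size;
      }
      return p;
    }

    void CollectGarbage(GcType /*type*/) {}  // Stand-in for a real collection.

    void* AllocateWithGc(size_t alloc_size, size_t* bytes_allocated,
                         size_t* usable_size) {
      for (GcType type : {GcType::kSticky, GcType::kPartial, GcType::kFull}) {
        CollectGarbage(type);  // Escalate, then retry the allocation.
        void* ptr = TryToAllocate(alloc_size, bytes_allocated, usable_size);
        if (ptr != nullptr) {
          return ptr;
        }
      }
      return nullptr;  // The real code throws OutOfMemoryError at this point.
    }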