Searched refs:Alloc (Results 1 - 25 of 108) sorted by relevance


/art/runtime/base/
arena_object.h
    34: return allocator->Alloc(size, kAllocKind);
    38: return allocator->Alloc(size, kAllocKind);
    58: return allocator->Alloc(size, kAllocKind);
    62: return allocator->Alloc(size, kAllocKind);
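
The arena_object.h hits all show the same pattern: an arena-allocated base class whose placement operator new forwards to allocator->Alloc(size, kAllocKind). Below is a minimal, self-contained sketch of that pattern; the names (Allocator, MallocBackedAllocator, ArenaObjectBase, AllocKind) are illustrative stand-ins, not ART's actual types.

#include <cstddef>
#include <cstdlib>

// Illustrative allocation-kind tag; ART's real enum is ArenaAllocKind.
enum class AllocKind { kMisc, kNode };

class Allocator {
 public:
  virtual ~Allocator() {}
  virtual void* Alloc(size_t bytes, AllocKind kind) = 0;
};

// Simplest backend that lets the sketch run: forwards to malloc and ignores
// the kind tag (kind tags like this are typically used for per-category accounting).
class MallocBackedAllocator : public Allocator {
 public:
  void* Alloc(size_t bytes, AllocKind /*kind*/) override { return std::malloc(bytes); }
};

// The arena_object.h pattern: a base class whose placement operator new
// routes every `new (allocator) T(...)` through Allocator::Alloc().
template <AllocKind kKind>
class ArenaObjectBase {
 public:
  void* operator new(size_t size, Allocator* allocator) {
    return allocator->Alloc(size, kKind);
  }
  void operator delete(void*, Allocator*) {}  // matching placement delete
  void operator delete(void*) {}              // no-op: the allocator owns the memory
};

struct Node : ArenaObjectBase<AllocKind::kNode> {
  int value = 0;
};

int main() {
  MallocBackedAllocator allocator;
  Node* n = new (&allocator) Node();  // lands in Allocator::Alloc
  n->value = 42;                      // (block intentionally leaked here; a real
  return n->value == 42 ? 0 : 1;      //  arena releases everything at once)
}
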
arena_allocator_test.cc
    77: void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8);
    78: void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 2 / 8);
    85: void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    86: void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 11 / 16);
    89: void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 7 / 16);
    97: void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16);
    98: void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
   102: void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16);
   110: void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16);
   111: void* alloc2 = allocator.Alloc(arena_allocato
    [all...]
arena_bit_vector.cc
    56: void* storage = allocator->template Alloc<ArenaBitVectorAllocator>(kind);
    65: virtual void* Alloc(size_t size) {  [function in class:art::FINAL]
    66: return allocator_->Alloc(size, this->Kind());
scoped_arena_allocator.h
    95: void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
   151: void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
   153: return arena_stack_->Alloc(bytes, kind);
   157: T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {  [function in class:art::ScopedArenaAllocator]
   163: return static_cast<T*>(Alloc(length * sizeof(T), kind));
arena_bit_vector.h
    38: void* storage = allocator->template Alloc<ArenaBitVector>(kind);
arena_allocator.h
   294: void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
   355: auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
   362: T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {  [function in class:art::ArenaAllocator]
   368: return static_cast<T*>(Alloc(length * sizeof(T), kind));
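
The arena_allocator.h hits show two conveniences layered on top of the byte-oriented Alloc(bytes, kind): a typed form that multiplies a length by sizeof(T) (line 368), and a Realloc that simply allocates a fresh block and lets Alloc handle alignment (line 355, per its own comment). A hedged standalone sketch of that layering; ByteArena and its fixed 8-byte alignment are assumptions made to keep the example runnable.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Illustrative byte-level arena; stands in for the Alloc(bytes, kind) seen above.
class ByteArena {
 public:
  explicit ByteArena(size_t capacity) : buffer_(capacity) {}

  void* Alloc(size_t bytes) {
    bytes = (bytes + 7u) & ~size_t{7u};  // Alloc takes care of alignment.
    if (used_ + bytes > buffer_.size()) return nullptr;
    void* p = buffer_.data() + used_;
    used_ += bytes;
    return p;
  }

  // Typed convenience: allocate space for `length` objects of type T.
  template <typename T>
  T* AllocArray(size_t length) {
    return static_cast<T*>(Alloc(length * sizeof(T)));
  }

  // Arena-style Realloc: arenas cannot free individual blocks, so growing one
  // means allocating a fresh block and copying the old contents over.
  void* Realloc(void* ptr, size_t old_size, size_t new_size) {
    if (new_size <= old_size) return ptr;
    void* new_ptr = Alloc(new_size);   // alignment handled inside Alloc
    if (new_ptr != nullptr && ptr != nullptr) {
      std::memcpy(new_ptr, ptr, old_size);
    }
    return new_ptr;
  }

 private:
  std::vector<uint8_t> buffer_;
  size_t used_ = 0;
};

int main() {
  ByteArena arena(4096);
  int* xs = arena.AllocArray<int>(8);
  for (int i = 0; i < 8; ++i) xs[i] = i;
  int* ys = static_cast<int*>(arena.Realloc(xs, 8 * sizeof(int), 16 * sizeof(int)));
  return (ys != nullptr && ys[7] == 7) ? 0 : 1;
}
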
/art/libartbase/base/
dchecked_vector.h
    36: template <typename T, typename Alloc = std::allocator<T>>
    37: class dchecked_vector : private std::vector<T, Alloc> {
    41: using Base = std::vector<T, Alloc>;
   195: template <typename T, typename Alloc>
   196: void swap(dchecked_vector<T, Alloc>& lhs, dchecked_vector<T, Alloc>& rhs) {
   201: template <typename T, typename Alloc>
   202: bool operator==(const dchecked_vector<T, Alloc>& lhs, const dchecked_vector<T, Alloc>& rhs) {
   205: template <typename T, typename Alloc>
    [all...]
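
The dchecked_vector.h hits outline the wrapper technique: inherit privately from std::vector<T, Alloc>, re-export the members that need no checking, and provide non-member swap and operator== on top (lines 196 and 202). Below is a condensed sketch of the same technique; checked_vector, Equals, and the small member subset shown are illustrative, not the real class's full interface.

#include <cassert>
#include <memory>
#include <vector>

template <typename T, typename Alloc = std::allocator<T>>
class checked_vector : private std::vector<T, Alloc> {
 private:
  using Base = std::vector<T, Alloc>;

 public:
  using typename Base::const_reference;
  using typename Base::reference;
  using typename Base::size_type;

  using Base::Base;       // inherit std::vector's constructors
  using Base::begin;
  using Base::end;
  using Base::empty;
  using Base::push_back;
  using Base::size;

  // operator[] gains the bounds check that std::vector leaves out.
  reference operator[](size_type n) {
    assert(n < size());
    return Base::operator[](n);
  }
  const_reference operator[](size_type n) const {
    assert(n < size());
    return Base::operator[](n);
  }

  // Members reach the private base; the non-member swap/operator== below
  // forward here, mirroring the overloads at lines 196 and 202.
  void swap(checked_vector& other) { Base::swap(other); }
  bool Equals(const checked_vector& other) const {
    return static_cast<const Base&>(*this) == static_cast<const Base&>(other);
  }
};

template <typename T, typename Alloc>
void swap(checked_vector<T, Alloc>& lhs, checked_vector<T, Alloc>& rhs) {
  lhs.swap(rhs);
}

template <typename T, typename Alloc>
bool operator==(const checked_vector<T, Alloc>& lhs, const checked_vector<T, Alloc>& rhs) {
  return lhs.Equals(rhs);
}

int main() {
  checked_vector<int> v;
  v.push_back(1);
  v.push_back(2);
  checked_vector<int> w = v;
  return (v == w && v[1] == 2) ? 0 : 1;
}
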
hash_map.h
    53: class Alloc = std::allocator<std::pair<Key, Value>>>
    58: Alloc> {
    64: Alloc>;
    68: explicit HashMap(const Alloc& alloc)
allocator.cc
    33: void* Alloc(size_t size) {  [function in class:art::FINAL]
    52: void* Alloc(size_t size ATTRIBUTE_UNUSED) {  [function in class:art::FINAL]
    53: LOG(FATAL) << "NoopAllocator::Alloc should not be called";
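
allocator.cc (lines 33 and 52-53) points at two implementations of a simple Allocator interface: one backed by malloc and a NoopAllocator whose Alloc must never be reached. A hedged sketch of that split; the exact interface shown here (Alloc/Free virtuals) is an assumption, and std::abort stands in for LOG(FATAL).

#include <cstddef>
#include <cstdio>
#include <cstdlib>

class Allocator {
 public:
  virtual ~Allocator() {}
  virtual void* Alloc(size_t size) = 0;
  virtual void Free(void* p) = 0;
};

// Forwards straight to the C heap.
class MallocAllocator : public Allocator {
 public:
  void* Alloc(size_t size) override { return std::malloc(size); }
  void Free(void* p) override { std::free(p); }
};

// Used where a container must never allocate through this path; calling
// Alloc is a programming error, so fail loudly instead of returning null.
class NoopAllocator : public Allocator {
 public:
  void* Alloc(size_t /*size*/) override {
    std::fprintf(stderr, "NoopAllocator::Alloc should not be called\n");
    std::abort();
  }
  void Free(void* /*p*/) override {}  // freeing is tolerated as a no-op
};

int main() {
  MallocAllocator malloc_allocator;
  void* p = malloc_allocator.Alloc(64);
  malloc_allocator.Free(p);
  return p != nullptr ? 0 : 1;
}
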
/art/compiler/utils/
dedupe_set.h
    35: typename Alloc,
    44: DedupeSet(const char* set_name, const Alloc& alloc);
dedupe_set-inl.h
    38: typename Alloc,
    42: struct DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Stats {
    51: typename Alloc,
    55: class DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Shard {
    57: Shard(const Alloc& alloc, const std::string& lock_name)
   177: Alloc alloc_;
   185: typename Alloc,
   189: const StoreKey* DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Add(
   207: typename Alloc,
   211: DedupeSet<InKey, StoreKey, Alloc, HashTyp
    [all...]
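
The DedupeSet hits show a set templated on an Alloc policy and a shard count, with each Shard owning its own lock and allocator. The sketch below covers the sharding idea only, under simplified assumptions: std::unordered_set stores the canonical copies, the allocator policy and statistics are dropped, and ShardedDedupeSet is an invented name.

#include <cstddef>
#include <functional>
#include <mutex>
#include <string>
#include <unordered_set>

template <typename Key, typename Hash = std::hash<Key>, size_t kNumShards = 8>
class ShardedDedupeSet {
 public:
  // Returns a stable pointer to the single deduplicated copy of `key`.
  const Key* Add(const Key& key) {
    const size_t hash = Hash()(key);
    Shard& shard = shards_[hash % kNumShards];        // pick a shard from the hash
    std::lock_guard<std::mutex> guard(shard.lock);    // contend only on that shard
    auto it = shard.keys.insert(key).first;           // no-op if already present
    return &*it;                                      // element addresses are stable
  }

 private:
  struct Shard {
    std::mutex lock;
    std::unordered_set<Key, Hash> keys;  // node-based: references survive rehashing
  };
  Shard shards_[kNumShards];
};

int main() {
  ShardedDedupeSet<std::string> set;
  const std::string* a = set.Add("Alloc");
  const std::string* b = set.Add("Alloc");
  return (a == b) ? 0 : 1;  // both calls see the same stored copy
}
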
/art/runtime/
linear_alloc.h
    31: void* Alloc(Thread* self, size_t size) REQUIRES(!lock_);
    40: return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T)));
linear_alloc.cc
    31: void* LinearAlloc::Alloc(Thread* self, size_t size) {  [function in class:art::LinearAlloc]
    33: return allocator_.Alloc(size);
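
The linear_alloc.h/.cc hits suggest a very thin class: Alloc(self, size) takes lock_ and forwards to a wrapped allocator_ (line 33), and the typed helper at line 40 is just elements * sizeof(T) again. Below is a standalone approximation with a plain std::mutex in place of ART's Thread/REQUIRES machinery; BackendAllocator is a stand-in for the real arena backend.

#include <cstddef>
#include <cstdlib>
#include <mutex>
#include <vector>

// Stand-in backend; the real LinearAlloc forwards to an arena allocator.
class BackendAllocator {
 public:
  void* Alloc(size_t size) {
    blocks_.push_back(std::malloc(size));
    return blocks_.back();
  }
  ~BackendAllocator() {
    for (void* p : blocks_) std::free(p);
  }

 private:
  std::vector<void*> blocks_;
};

class LinearAllocSketch {
 public:
  // Take the lock, then delegate; mirrors the shape at lines 31-33 above.
  void* Alloc(size_t size) {
    std::lock_guard<std::mutex> guard(lock_);
    return allocator_.Alloc(size);
  }

  // Typed convenience, as at line 40: elements * sizeof(T).
  template <typename T>
  T* AllocArray(size_t elements) {
    return reinterpret_cast<T*>(Alloc(elements * sizeof(T)));
  }

 private:
  std::mutex lock_;
  BackendAllocator allocator_;
};

int main() {
  LinearAllocSketch linear_alloc;
  int* xs = linear_alloc.AllocArray<int>(4);
  xs[3] = 7;
  return xs[3] == 7 ? 0 : 1;
}
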
reference_table_test.cc
   109: mirror::Object* o2 = mirror::ShortArray::Alloc(soa.Self(), 0);
   292: mirror::Object* b1_1 = mirror::ByteArray::Alloc(soa.Self(), 1);
   294: rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2));
   296: rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2));
   297: rt.Add(mirror::ByteArray::Alloc(soa.Self(), 1));
   298: rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2));
   301: rt.Add(mirror::CharArray::Alloc(soa.Self(), 0));
/art/test/616-cha-unloading/
cha_unload.cc
    48: alloc->Alloc(Thread::Current(), 1);
/art/runtime/gc/space/
space_create_test.cc
   105: MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
   118: mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
   136: mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &dummy, nullptr, &dummy);
   169: EXPECT_TRUE(space->Alloc(self, 1U * MB, &dummy, nullptr, &dummy) != nullptr);
   190: ptr1.Assign(Alloc(space,
   203: ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
   239: MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
   252: mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
   270: mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
   317: lots_of_objects[i] = Alloc(spac
    [all...]
memory_tool_malloc_space.h
    37: mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
rosalloc_space-inl.h
    42: rosalloc_->Alloc<kThreadSafe>(self, num_bytes, &rosalloc_bytes_allocated,
region_space-inl.h
    27: inline mirror::Object* RegionSpace::Alloc(Thread* self ATTRIBUTE_UNUSED,  [function in class:art::gc::space::RegionSpace]
    43: return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
    55: obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
    64: obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes,
    73: obj = r->Alloc(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
    94: inline mirror::Object* RegionSpace::Region::Alloc(size_t num_bytes,  [function in class:art::gc::space::RegionSpace::Region]
   289: // allocation may use all of 'usable_size' (see mirror::Array::Alloc).
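
The region_space-inl.h hits suggest a two-level shape: RegionSpace::Alloc tries the current (or evacuation) region and falls back when it is full, while Region::Alloc bumps a top pointer. The sketch below is a loose approximation under simplified assumptions: a CAS-bumped offset per region, no evacuation path, no thread-local buffers, and invented class names (Region here is not ART's Region).

#include <atomic>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

class Region {
 public:
  explicit Region(size_t capacity) : storage_(capacity), top_(0) {}

  // Lock-free bump allocation; returns nullptr when the region cannot satisfy
  // the request, so the caller retires this region and retries in a new one.
  void* Alloc(size_t num_bytes) {
    num_bytes = (num_bytes + 7u) & ~size_t{7u};
    size_t old_top = top_.load(std::memory_order_relaxed);
    do {
      if (old_top + num_bytes > storage_.size()) return nullptr;
    } while (!top_.compare_exchange_weak(old_top, old_top + num_bytes,
                                         std::memory_order_relaxed));
    return storage_.data() + old_top;
  }

 private:
  std::vector<uint8_t> storage_;
  std::atomic<size_t> top_;
};

class RegionSpaceSketch {
 public:
  RegionSpaceSketch(size_t num_regions, size_t region_size) {
    for (size_t i = 0; i < num_regions; ++i) {
      regions_.push_back(std::make_unique<Region>(region_size));
    }
  }

  void* Alloc(size_t num_bytes) {
    while (current_ < regions_.size()) {
      void* obj = regions_[current_]->Alloc(num_bytes);  // fast path: bump in region
      if (obj != nullptr) return obj;
      ++current_;  // region full: fall back to the next one
    }
    return nullptr;  // space exhausted
  }

 private:
  std::vector<std::unique_ptr<Region>> regions_;
  size_t current_ = 0;
};

int main() {
  RegionSpaceSketch space(/*num_regions=*/4, /*region_size=*/256);
  void* a = space.Alloc(200);
  void* b = space.Alloc(200);  // no longer fits in the first region
  return (a != nullptr && b != nullptr && a != b) ? 0 : 1;
}
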
/art/runtime/mirror/
object_array.h
    34: static ObjectArray<T>* Alloc(Thread* self,
    40: static ObjectArray<T>* Alloc(Thread* self,
stack_trace_element.cc
    43: StackTraceElement* StackTraceElement::Alloc(Thread* self,  [function in class:art::mirror::StackTraceElement]
object_test.cc
    80: return mirror::ObjectArray<T>::Alloc(
   158: hs.NewHandle(Array::Alloc<true>(soa.Self(), c, 1, c->GetComponentSizeShift(),
   164: a.Assign(Array::Alloc<true>(soa.Self(), c, 1, c->GetComponentSizeShift(),
   170: a.Assign(Array::Alloc<true>(soa.Self(), c, 1, c->GetComponentSizeShift(),
   181: hs.NewHandle(Array::Alloc<true, true>(soa.Self(), c, 1, c->GetComponentSizeShift(),
   187: a.Assign(Array::Alloc<true, true>(soa.Self(), c, 2, c->GetComponentSizeShift(),
   193: a.Assign(Array::Alloc<true, true>(soa.Self(), c, 2, c->GetComponentSizeShift(),
   199: a.Assign(Array::Alloc<true, true>(soa.Self(), c, 2, c->GetComponentSizeShift(),
   210: ArrayT* a = ArrayT::Alloc(soa.Self(), 2);
   258: ArrayT* a = ArrayT::Alloc(so
    [all...]
array.cc
    52: Array::Alloc<true>(self, array_class.Get(), array_length,
   140: ObjPtr<Array> new_array = Alloc<true>(self, GetClass(), new_length, component_shift, allocator_type);
class_ext.cc
    77: ObjectArray<DexCache>::Alloc(self,
   104: ClassExt* ClassExt::Alloc(Thread* self) {  [function in class:art::mirror::ClassExt]
/art/compiler/linker/arm64/
relative_patcher_arm64.h
   116: template <typename Alloc>
   117: static uint32_t GetInsn(std::vector<uint8_t, Alloc>* code, uint32_t offset);

Completed in 199 milliseconds
