Searched defs:allocator (Results 1 - 25 of 38) sorted by relevance


/art/compiler/utils/
allocation.h
28 void* operator new(size_t size, ArenaAllocator* allocator) { argument
29 return allocator->Alloc(size, kArenaAllocMisc);
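Note: this overload makes new (allocator) T(...) carve the object's storage out of the arena, which is how the optimizing-compiler results below create graphs, blocks, and instructions. A minimal usage sketch, assuming the ArenaPool/ArenaAllocator pair seen elsewhere in these results and an ArenaObject-style base class that provides the overload (MyNode is illustrative, not from ART; if the overload is a free function, the inheritance is unnecessary):

    // Illustrative arena-allocated class; in ART, classes such as HGraph and
    // HBasicBlock inherit the placement operator new shown above.
    class MyNode : public ArenaObject {
     public:
      int value = 0;
    };

    void Example() {
      ArenaPool pool;
      ArenaAllocator allocator(&pool);
      // The placement form forwards to allocator->Alloc(size, kArenaAllocMisc).
      // Storage is reclaimed in bulk with the pool, so there is no matching delete.
      MyNode* node = new (&allocator) MyNode();
      node->value = 42;
    }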
scoped_arena_allocator.h
104 // the allocator is not exactly a C++ block scope. For example, an optimization
105 // pass can create the scoped allocator in Start() and destroy it in End().
108 ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack); local
109 allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
110 return allocator;
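Note: the snippet above (scoped_arena_allocator.h:108-110) places the ScopedArenaAllocator object itself on the arena stack and records the mark, so tearing the scope down also reclaims the allocator's own storage. A hedged sketch of the Start()/End() pattern the comment describes, assuming the snippet is the body of a Create(ArenaStack*) factory; the pass class and the explicit destructor call are illustrative, not ART's actual code:

    class MyOptimizationPass {
     public:
      explicit MyOptimizationPass(ArenaStack* arena_stack) : arena_stack_(arena_stack) {}

      void Start() {
        // Everything allocated through allocator_ from here on lives until End().
        allocator_ = ScopedArenaAllocator::Create(arena_stack_);
      }

      void End() {
        // Destroying the scoped allocator pops the arena stack back to the mark
        // recorded at creation, releasing the pass's allocations in one step.
        allocator_->~ScopedArenaAllocator();
        allocator_ = nullptr;
      }

     private:
      ArenaStack* arena_stack_;
      ScopedArenaAllocator* allocator_ = nullptr;
    };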
/art/runtime/gc/allocator/
rosalloc-inl.h
24 namespace allocator { namespace in namespace:art::gc
47 } // namespace allocator
rosalloc.h
38 namespace allocator { namespace in namespace:art::gc
40 // A runs-of-slots memory allocator.
409 // The base address of the memory region that's managed by this allocator.
417 // the end of the memory region that's currently managed by this allocator.
421 // the end of the memory region that's ever managed by this allocator.
479 // The base address of the memory region that's managed by this allocator.
481 // The end address of the memory region that's managed by this allocator.
530 // If kThreadUnsafe is true then the allocator may avoid acquiring some locks as an optimization.
598 } // namespace allocator
/art/compiler/optimizing/
live_ranges_test.cc
30 static HGraph* BuildGraph(const uint16_t* data, ArenaAllocator* allocator) { argument
31 HGraphBuilder builder(allocator);
58 ArenaAllocator allocator(&pool);
59 HGraph* graph = BuildGraph(data, &allocator);
61 CodeGenerator* codegen = CodeGenerator::Create(&allocator, graph, InstructionSet::kX86);
105 ArenaAllocator allocator(&pool);
106 HGraph* graph = BuildGraph(data, &allocator);
107 CodeGenerator* codegen = CodeGenerator::Create(&allocator, graph, InstructionSet::kX86);
154 ArenaAllocator allocator(&pool);
155 HGraph* graph = BuildGraph(data, &allocator);
[all...]
optimizing_unit_test.h
41 ArenaAllocator* allocator,
43 LiveInterval* interval = new (allocator) LiveInterval(allocator, Primitive::kPrimInt);
39 BuildInterval(const size_t ranges[][2], size_t number_of_ranges, ArenaAllocator* allocator, int reg = -1) argument
parallel_move_resolver.h
36 explicit ParallelMoveResolver(ArenaAllocator* allocator) : moves_(allocator, 32) {} argument
parallel_move_test.cc
27 explicit TestParallelMoveResolver(ArenaAllocator* allocator) : ParallelMoveResolver(allocator) {} argument
67 static HParallelMove* BuildParallelMove(ArenaAllocator* allocator, argument
70 HParallelMove* moves = new (allocator) HParallelMove(allocator);
72 moves->AddMove(new (allocator) MoveOperands(
81 ArenaAllocator allocator(&pool);
84 TestParallelMoveResolver resolver(&allocator);
86 resolver.EmitNativeCode(BuildParallelMove(&allocator, moves, arraysize(moves)));
91 TestParallelMoveResolver resolver(&allocator);
[all...]
codegen_test.cc
51 static void Run(const InternalCodeAllocator& allocator, argument
56 CommonCompilerTest::MakeExecutable(allocator.GetMemory(), allocator.GetSize());
57 fptr f = reinterpret_cast<fptr>(allocator.GetMemory());
76 InternalCodeAllocator allocator; local
81 codegen->CompileBaseline(&allocator, true);
83 Run(allocator, *codegen, has_result, expected);
87 codegen->CompileBaseline(&allocator, true);
89 Run(allocator, *codegen, has_result, expected);
93 codegen->CompileBaseline(&allocator, true);
[all...]
graph_test.cc
28 static HBasicBlock* createIfBlock(HGraph* graph, ArenaAllocator* allocator) { argument
29 HBasicBlock* if_block = new (allocator) HBasicBlock(graph);
31 HInstruction* instr = new (allocator) HIntConstant(4);
33 HInstruction* equal = new (allocator) HEqual(instr, instr);
35 instr = new (allocator) HIf(equal);
40 static HBasicBlock* createGotoBlock(HGraph* graph, ArenaAllocator* allocator) { argument
41 HBasicBlock* block = new (allocator) HBasicBlock(graph);
43 HInstruction* got = new (allocator) HGoto();
48 static HBasicBlock* createReturnBlock(HGraph* graph, ArenaAllocator* allocator) { argument
49 HBasicBlock* block = new (allocator) HBasicBlock(graph);
56 createExitBlock(HGraph* graph, ArenaAllocator* allocator) argument
[all...]
register_allocator_test.cc
31 // Note: the register allocator tests rely on the fact that constants have live
36 ArenaAllocator allocator(&pool);
37 HGraphBuilder builder(&allocator);
43 CodeGenerator* codegen = CodeGenerator::Create(&allocator, graph, kX86);
46 RegisterAllocator register_allocator(&allocator, codegen, liveness);
52 * Unit testing of RegisterAllocator::ValidateIntervals. Register allocator
57 ArenaAllocator allocator(&pool);
58 HGraph* graph = new (&allocator) HGraph(&allocator);
59 CodeGenerator* codegen = CodeGenerator::Create(&allocator, graph, kX86);
250 BuildSSAGraph(const uint16_t* data, ArenaAllocator* allocator) argument
[all...]
code_generator_arm.h
65 ParallelMoveResolverARM(ArenaAllocator* allocator, CodeGeneratorARM* codegen) argument
66 : ParallelMoveResolver(allocator), codegen_(codegen) {}
code_generator_x86.h
65 ParallelMoveResolverX86(ArenaAllocator* allocator, CodeGeneratorX86* codegen) argument
66 : ParallelMoveResolver(allocator), codegen_(codegen) {}
code_generator_x86_64.h
61 ParallelMoveResolverX86_64(ArenaAllocator* allocator, CodeGeneratorX86_64* codegen) argument
62 : ParallelMoveResolver(allocator), codegen_(codegen) {}
optimizing_compiler.cc
128 CodeVectorAllocator allocator; local
147 codegen->CompileOptimized(&allocator);
151 codegen->CompileBaseline(&allocator);
172 allocator.GetMemory(),
stack_map_stream.h
34 explicit StackMapStream(ArenaAllocator* allocator) argument
35 : stack_maps_(allocator, 10),
36 dex_register_maps_(allocator, 10 * 4),
37 inline_infos_(allocator, 2),
code_generator.cc
33 void CodeGenerator::CompileBaseline(CodeAllocator* allocator, bool is_leaf) { argument
64 uint8_t* buffer = allocator->Allocate(code_size);
69 void CodeGenerator::CompileOptimized(CodeAllocator* allocator) { argument
90 uint8_t* buffer = allocator->Allocate(code_size);
254 CodeGenerator* CodeGenerator::Create(ArenaAllocator* allocator, argument
260 return new (allocator) arm::CodeGeneratorARM(graph);
265 return new (allocator) x86::CodeGeneratorX86(graph);
268 return new (allocator) x86_64::CodeGeneratorX86_64(graph);
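Note: CompileBaseline() and CompileOptimized() above only require that the CodeAllocator hand back a writable buffer of code_size bytes. A hedged sketch of a CodeVectorAllocator-style implementation, assuming CodeAllocator declares a virtual uint8_t* Allocate(size_t) as the calls suggest; the class name and member layout are illustrative:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    class VectorBackedCodeAllocator : public CodeAllocator {
     public:
      uint8_t* Allocate(size_t size) override {
        memory_.resize(size);
        return memory_.data();  // CompileBaseline/CompileOptimized copy the generated code here.
      }

      const uint8_t* GetMemory() const { return memory_.data(); }
      size_t GetSize() const { return memory_.size(); }

     private:
      std::vector<uint8_t> memory_;
    };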
/art/runtime/entrypoints/quick/
quick_alloc_entrypoints.cc
167 void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) { argument
168 entry_points_allocator = allocator;
/art/compiler/dex/
global_value_numbering.cc
23 GlobalValueNumbering::GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator) argument
26 allocator_(allocator),
31 global_value_map_(std::less<uint64_t>(), allocator->Adapter()),
32 field_index_map_(FieldReferenceComparator(), allocator->Adapter()),
33 field_index_reverse_map_(allocator->Adapter()),
34 array_location_map_(ArrayLocationComparator(), allocator->Adapter()),
35 array_location_reverse_map_(allocator->Adapter()),
36 ref_set_map_(std::less<ValueNameSet>(), allocator->Adapter()),
37 lvns_(mir_graph_->GetNumBlocks(), nullptr, allocator->Adapter()),
39 merge_lvns_(allocator
46 PrepareBasicBlock(BasicBlock* bb, ScopedArenaAllocator* allocator) argument
[all...]
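Note: the initializer list above passes allocator->Adapter() as the allocator argument of each container, so their backing storage comes from the scoped arena and is released in bulk. A minimal sketch of that idiom, assuming the ScopedArenaVector alias from ART's scoped arena containers; the header path and element type are illustrative:

    #include "utils/scoped_arena_containers.h"  // assumed header providing ScopedArenaVector

    void Example(ScopedArenaAllocator* allocator) {
      // Elements are allocated from the scoped arena; no per-element delete is needed,
      // and everything goes away when the allocator's scope ends.
      ScopedArenaVector<uint16_t> value_names(allocator->Adapter());
      value_names.push_back(1u);
    }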
global_value_numbering.h
31 GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator);
36 ScopedArenaAllocator* allocator = nullptr);
58 static void* operator new(size_t size, ScopedArenaAllocator* allocator) { argument
59 return allocator->Alloc(sizeof(GlobalValueNumbering), kArenaAllocMisc);
/art/runtime/gc/
heap-inl.h
40 size_t byte_count, AllocatorType allocator,
49 // Need to check that we arent the large object allocator since the large object allocation code
60 if (allocator == kAllocatorTypeTLAB) {
64 if (allocator == kAllocatorTypeTLAB && byte_count <= self->TlabSize()) {
79 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
82 bool is_current_allocator = allocator == GetCurrentAllocator();
83 obj = AllocateInternalWithGc(self, allocator, byte_count, &bytes_allocated, &usable_size,
86 bool after_is_current_allocator = allocator == GetCurrentAllocator();
90 // If the allocator changed, we need to restart the allocation.
105 if (collector::SemiSpace::kUseRememberedSet && UNLIKELY(allocator
39 AllocObjectWithAllocator(Thread* self, mirror::Class* klass, size_t byte_count, AllocatorType allocator, const PreFenceVisitor& pre_fence_visitor) argument
[all...]
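Note: the heap-inl.h snippet shows the shape of ART's allocation path: a fast path (including the TLAB check), a GC-assisted slow path, and a restart if the collection switched the active allocator. Below is a hedged, self-contained control-flow sketch of that logic; every type and helper is an illustrative stand-in, and the real code carries template flags (kInstrumented), bytes_allocated/usable_size out-parameters, and exception checks that are omitted here.

    #include <cstddef>

    // Illustrative stand-ins, not ART's real types.
    enum class AllocatorType { kTLAB, kRosAlloc, kBumpPointer };

    struct HeapSketch {
      AllocatorType current_allocator;

      // Fast path (including the TLAB case); stubbed out for illustration.
      void* TryToAllocate(AllocatorType /*type*/, size_t /*bytes*/) { return nullptr; }
      // Slow path that may trigger a collection; stubbed out for illustration.
      void* AllocateInternalWithGc(AllocatorType /*type*/, size_t /*bytes*/) { return nullptr; }

      void* AllocObject(AllocatorType allocator, size_t byte_count) {
        void* obj = TryToAllocate(allocator, byte_count);
        if (obj != nullptr) {
          return obj;
        }
        bool was_current = (allocator == current_allocator);
        obj = AllocateInternalWithGc(allocator, byte_count);
        if (obj == nullptr && was_current && allocator != current_allocator) {
          // A collection switched the active allocator; restart the allocation
          // with the new one instead of reporting an out-of-memory condition.
          return AllocObject(current_allocator, byte_count);
        }
        return obj;
      }
    };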
/art/runtime/gc/space/
valgrind_malloc_space-inl.h
96 A allocator, byte* begin,
100 S(name, mem_map, allocator, begin, end, limit, growth_limit, can_move_objects, starting_size,
95 ValgrindMallocSpace(const std::string& name, MemMap* mem_map, A allocator, byte* begin, byte* end, byte* limit, size_t growth_limit, size_t initial_size, bool can_move_objects, size_t starting_size) argument
malloc_space.cc
194 void* allocator = CreateAllocator(End(), starting_size_, initial_size_, capacity, local
201 *out_malloc_space = CreateInstance(alloc_space_name, mem_map.release(), allocator, End(), end,
/art/compiler/dex/quick/
resource_mask.h
153 explicit ResourceMaskCache(ArenaAllocator* allocator) argument
154 : allocator_(allocator) {
/art/runtime/interpreter/
interpreter.cc
38 gc::AllocatorType allocator = runtime->GetHeap()->GetCurrentAllocator(); local
40 array_class->GetComponentSize(), allocator, true));

