Searched defs:allocator (Results 1 - 25 of 70) sorted by relevance

/art/compiler/optimizing/
graph_checker_test.cc
30 HGraph* CreateSimpleCFG(ArenaAllocator* allocator) { argument
31 HGraph* graph = CreateGraph(allocator);
32 HBasicBlock* entry_block = new (allocator) HBasicBlock(graph);
33 entry_block->AddInstruction(new (allocator) HReturnVoid());
36 HBasicBlock* exit_block = new (allocator) HBasicBlock(graph);
37 exit_block->AddInstruction(new (allocator) HExit());
47 ArenaAllocator allocator(&pool);
48 HGraph* graph = CreateCFG(&allocator, data);
97 ArenaAllocator allocator(&pool);
99 HGraph* graph = CreateSimpleCFG(&allocator);
[all...]
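
CreateSimpleCFG above builds the smallest graph the checker accepts: an entry block ending in HReturnVoid wired to an exit block ending in HExit. The sketch below reproduces that shape with simplified stand-in types; Graph, BasicBlock, ReturnVoid and Exit are illustrative, not the real HGraph API, which allocates every node from an ArenaAllocator with placement new.

    #include <memory>
    #include <utility>
    #include <vector>

    // Simplified stand-ins for the ART IR classes; illustrative only.
    struct Instruction { virtual ~Instruction() = default; };
    struct ReturnVoid : Instruction {};
    struct Exit : Instruction {};

    struct BasicBlock {
      std::vector<std::unique_ptr<Instruction>> instructions;
      std::vector<BasicBlock*> successors;
      void AddInstruction(std::unique_ptr<Instruction> insn) {
        instructions.push_back(std::move(insn));
      }
      void AddSuccessor(BasicBlock* block) { successors.push_back(block); }
    };

    struct Graph {
      std::vector<std::unique_ptr<BasicBlock>> blocks;
      BasicBlock* entry = nullptr;
      BasicBlock* exit = nullptr;
      BasicBlock* AddBlock() {
        blocks.push_back(std::make_unique<BasicBlock>());
        return blocks.back().get();
      }
    };

    // Shape of CreateSimpleCFG: an entry block ending in a return-void,
    // wired to an exit block ending in an exit marker.
    Graph BuildSimpleCFG() {
      Graph graph;
      BasicBlock* entry = graph.AddBlock();
      entry->AddInstruction(std::make_unique<ReturnVoid>());
      BasicBlock* exit = graph.AddBlock();
      exit->AddInstruction(std::make_unique<Exit>());
      entry->AddSuccessor(exit);
      graph.entry = entry;
      graph.exit = exit;
      return graph;
    }
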
live_ranges_test.cc
34 static HGraph* BuildGraph(const uint16_t* data, ArenaAllocator* allocator) { argument
35 HGraph* graph = CreateCFG(allocator, data);
62 ArenaAllocator allocator(&pool);
63 HGraph* graph = BuildGraph(data, &allocator);
111 ArenaAllocator allocator(&pool);
112 HGraph* graph = BuildGraph(data, &allocator);
162 ArenaAllocator allocator(&pool);
163 HGraph* graph = BuildGraph(data, &allocator);
240 ArenaAllocator allocator(&pool);
241 HGraph* graph = BuildGraph(data, &allocator);
[all...]
graph_visualizer.h
57 explicit DisassemblyInformation(ArenaAllocator* allocator) argument
59 instruction_intervals_(std::less<const HInstruction*>(), allocator->Adapter()),
60 slow_path_intervals_(allocator->Adapter()) {}
linear_order.cc
83 ArenaAllocator* allocator,
96 allocator->Adapter(kArenaAllocLinearOrder));
109 ArenaVector<HBasicBlock*> worklist(allocator->Adapter(kArenaAllocLinearOrder));
82 LinearizeGraph(const HGraph* graph, ArenaAllocator* allocator, ArenaVector<HBasicBlock*>* linear_order) argument
parallel_move_resolver.h
34 explicit ParallelMoveResolver(ArenaAllocator* allocator) argument
35 : moves_(allocator->Adapter(kArenaAllocParallelMoveResolver)) {
56 explicit ParallelMoveResolverWithSwap(ArenaAllocator* allocator) argument
57 : ParallelMoveResolver(allocator) {}
125 explicit ParallelMoveResolverNoSwap(ArenaAllocator* allocator) argument
126 : ParallelMoveResolver(allocator),
127 scratches_(allocator->Adapter(kArenaAllocParallelMoveResolver)),
128 pending_moves_(allocator->Adapter(kArenaAllocParallelMoveResolver)),
129 allocator_(allocator) {
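
The resolver constructors above route every member container through allocator->Adapter(kind), so the vectors draw their memory from the compilation's arena and are tagged for arena accounting. Below is a minimal sketch of that adapter pattern under assumed names: Arena and ArenaAdapter stand in for ArenaAllocator and the Adapter() it returns, and ToyMoveResolver mirrors only the constructor shape.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy arena; illustrative stand-in for ArenaAllocator.
    class Arena {
     public:
      void* Alloc(size_t bytes) {
        storage_.push_back(std::vector<uint8_t>(bytes));
        return storage_.back().data();
      }
     private:
      std::vector<std::vector<uint8_t>> storage_;
    };

    // STL-compatible adapter in the spirit of allocator->Adapter(kind):
    // containers draw their memory from the arena and never free it
    // individually (the arena releases everything at once).
    template <typename T>
    class ArenaAdapter {
     public:
      using value_type = T;
      explicit ArenaAdapter(Arena* arena) : arena_(arena) {}
      template <typename U>
      ArenaAdapter(const ArenaAdapter<U>& other) : arena_(other.arena()) {}
      T* allocate(size_t n) { return static_cast<T*>(arena_->Alloc(n * sizeof(T))); }
      void deallocate(T*, size_t) {}  // Arena memory is reclaimed wholesale.
      Arena* arena() const { return arena_; }
     private:
      Arena* arena_;
    };

    template <typename T, typename U>
    bool operator==(const ArenaAdapter<T>& a, const ArenaAdapter<U>& b) {
      return a.arena() == b.arena();
    }
    template <typename T, typename U>
    bool operator!=(const ArenaAdapter<T>& a, const ArenaAdapter<U>& b) {
      return !(a == b);
    }

    struct Move { int source; int destination; };

    // Mirrors the ParallelMoveResolver constructors above: every member
    // container is bound to the arena that owns the whole compilation.
    class ToyMoveResolver {
     public:
      explicit ToyMoveResolver(Arena* arena)
          : moves_(ArenaAdapter<Move>(arena)),
            pending_moves_(ArenaAdapter<Move>(arena)) {}
      void AddMove(Move m) { moves_.push_back(m); }
     private:
      std::vector<Move, ArenaAdapter<Move>> moves_;
      std::vector<Move, ArenaAdapter<Move>> pending_moves_;
    };
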
parallel_move_test.cc
55 explicit TestParallelMoveResolverWithSwap(ArenaAllocator* allocator) argument
56 : ParallelMoveResolverWithSwap(allocator) {}
98 explicit TestParallelMoveResolverNoSwap(ArenaAllocator* allocator) argument
99 : ParallelMoveResolverNoSwap(allocator), scratch_index_(kScratchRegisterStartIndexForTest) {}
153 static HParallelMove* BuildParallelMove(ArenaAllocator* allocator, argument
156 HParallelMove* moves = new (allocator) HParallelMove(allocator);
184 ArenaAllocator allocator(&pool);
187 TypeParam resolver(&allocator);
189 resolver.EmitNativeCode(BuildParallelMove(&allocator, move
[all...]
graph_test.cc
27 static HBasicBlock* createIfBlock(HGraph* graph, ArenaAllocator* allocator) { argument
28 HBasicBlock* if_block = new (allocator) HBasicBlock(graph);
31 HInstruction* equal = new (allocator) HEqual(instr, instr);
33 instr = new (allocator) HIf(equal);
38 static HBasicBlock* createGotoBlock(HGraph* graph, ArenaAllocator* allocator) { argument
39 HBasicBlock* block = new (allocator) HBasicBlock(graph);
41 HInstruction* got = new (allocator) HGoto();
46 static HBasicBlock* createEntryBlock(HGraph* graph, ArenaAllocator* allocator) { argument
47 HBasicBlock* block = createGotoBlock(graph, allocator);
52 static HBasicBlock* createReturnBlock(HGraph* graph, ArenaAllocator* allocator) { argument
60 createExitBlock(HGraph* graph, ArenaAllocator* allocator) argument
[all...]
optimizing_unit_test.h
50 ArenaAllocator* allocator,
53 LiveInterval* interval = LiveInterval::MakeInterval(allocator, Primitive::kPrimInt, defined_by);
80 inline HGraph* CreateGraph(ArenaAllocator* allocator) { argument
81 return new (allocator) HGraph(
82 allocator,
83 *reinterpret_cast<DexFile*>(allocator->Alloc(sizeof(DexFile))),
89 inline HGraph* CreateCFG(ArenaAllocator* allocator, argument
94 HGraph* graph = CreateGraph(allocator);
48 BuildInterval(const size_t ranges[][2], size_t number_of_ranges, ArenaAllocator* allocator, int reg = -1, HInstruction* defined_by = nullptr) argument
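
BuildInterval is a test convenience: it turns a static table of [start, end) pairs into a LiveInterval, optionally pre-assigning a register and a defining instruction. Here is a simplified, arena-free version of the same idea; LiveRange and LiveInterval are illustrative stand-ins for the real classes.

    #include <cstddef>
    #include <vector>

    // Simplified live-interval model; illustrative only.
    struct LiveRange {
      size_t start;
      size_t end;  // Exclusive.
    };

    struct LiveInterval {
      std::vector<LiveRange> ranges;
      int reg = -1;  // Assigned register; -1 while unallocated.
      bool Covers(size_t position) const {
        for (const LiveRange& r : ranges) {
          if (position >= r.start && position < r.end) return true;
        }
        return false;
      }
    };

    // Shape of the BuildInterval test helper above: turn a static table of
    // [start, end) pairs into an interval, optionally pre-assigning a register.
    LiveInterval BuildInterval(const size_t ranges[][2],
                               size_t number_of_ranges,
                               int reg = -1) {
      LiveInterval interval;
      for (size_t i = 0; i < number_of_ranges; ++i) {
        interval.ranges.push_back({ranges[i][0], ranges[i][1]});
      }
      interval.reg = reg;
      return interval;
    }
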
code_generator_x86_64.h
129 ParallelMoveResolverX86_64(ArenaAllocator* allocator, CodeGeneratorX86_64* codegen) argument
130 : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}
361 void Finalize(CodeAllocator* allocator) OVERRIDE;
codegen_test_utils.h
226 static void Run(const InternalCodeAllocator& allocator, argument
233 CommonCompilerTest::MakeExecutable(allocator.GetMemory(), allocator.GetSize());
234 fptr f = reinterpret_cast<fptr>(allocator.GetMemory());
264 InternalCodeAllocator allocator; local
265 codegen->Compile(&allocator);
266 Run(allocator, *codegen, has_result, expected);
dead_code_elimination.cc
309 ArenaAllocator* allocator = graph_->GetArena(); local
310 ArenaBitVector live_blocks(allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
register_allocator.cc
31 RegisterAllocator::RegisterAllocator(ArenaAllocator* allocator, argument
34 : allocator_(allocator),
38 RegisterAllocator* RegisterAllocator::Create(ArenaAllocator* allocator, argument
44 return new (allocator) RegisterAllocatorLinearScan(allocator, codegen, analysis);
46 return new (allocator) RegisterAllocatorGraphColor(allocator, codegen, analysis);
95 ArenaAllocator* allocator,
102 allocator->Adapter(kArenaAllocRegisterAllocatorValidate));
116 ArenaBitVector::Create(allocator, max_en
91 ValidateIntervals(const ArenaVector<LiveInterval*>& intervals, size_t number_of_spill_slots, size_t number_of_out_slots, const CodeGenerator& codegen, ArenaAllocator* allocator, bool processing_core_registers, bool log_fatal_on_failure) argument
[all...]
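
RegisterAllocator::Create is a factory that picks the linear-scan or graph-coloring implementation and constructs it with placement new inside the compilation's arena. The condensed sketch below shows that structure under assumed names (Arena, Strategy, the *Sketch classes); note that arena-placed objects are never individually destroyed, so they must not own non-arena resources.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy arena plus a placement operator new; illustrative stand-ins only.
    class Arena {
     public:
      void* Alloc(size_t bytes) {
        storage_.push_back(std::vector<uint8_t>(bytes));
        return storage_.back().data();
      }
     private:
      std::vector<std::vector<uint8_t>> storage_;
    };

    inline void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }

    enum class Strategy { kLinearScan, kGraphColor };

    // Strategy interface in the spirit of RegisterAllocator.
    class RegisterAllocatorSketch {
     public:
      explicit RegisterAllocatorSketch(Arena* arena) : arena_(arena) {}
      virtual ~RegisterAllocatorSketch() = default;
      virtual void AllocateRegisters() = 0;

      // Mirrors RegisterAllocator::Create: pick an implementation and build it
      // with placement new in the arena that owns the compilation.
      static RegisterAllocatorSketch* Create(Arena* arena, Strategy strategy);

     protected:
      Arena* arena_;
    };

    class LinearScanSketch : public RegisterAllocatorSketch {
     public:
      using RegisterAllocatorSketch::RegisterAllocatorSketch;
      void AllocateRegisters() override { /* walk live intervals in order */ }
    };

    class GraphColorSketch : public RegisterAllocatorSketch {
     public:
      using RegisterAllocatorSketch::RegisterAllocatorSketch;
      void AllocateRegisters() override { /* build and color an interference graph */ }
    };

    RegisterAllocatorSketch* RegisterAllocatorSketch::Create(Arena* arena, Strategy strategy) {
      switch (strategy) {
        case Strategy::kLinearScan:
          return new (arena) LinearScanSketch(arena);
        case Strategy::kGraphColor:
          return new (arena) GraphColorSketch(arena);
      }
      return nullptr;
    }
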
ssa_builder.cc
617 ArenaAllocator* allocator = graph_->GetArena(); local
620 new (allocator) HPhi(allocator, phi->GetRegNumber(), inputs.size(), type);
bounds_check_elimination_test.cc
359 ArenaAllocator* allocator,
363 HBasicBlock* entry = new (allocator) HBasicBlock(graph);
366 HInstruction* parameter = new (allocator) HParameterValue(
374 HBasicBlock* block = new (allocator) HBasicBlock(graph);
377 block->AddInstruction(new (allocator) HGoto());
379 HBasicBlock* loop_header = new (allocator) HBasicBlock(graph);
380 HBasicBlock* loop_body = new (allocator) HBasicBlock(graph);
381 HBasicBlock* exit = new (allocator) HBasicBlock(graph);
391 HPhi* phi = new (allocator) HPhi(allocator,
358 BuildSSAGraph1(HGraph* graph, ArenaAllocator* allocator, int initial, int increment, IfCondition cond = kCondGE) argument
473 BuildSSAGraph2(HGraph *graph, ArenaAllocator* allocator, int initial, int increment = -1, IfCondition cond = kCondLE) argument
583 BuildSSAGraph3(HGraph* graph, ArenaAllocator* allocator, int initial, int increment, IfCondition cond) argument
685 BuildSSAGraph4(HGraph* graph, ArenaAllocator* allocator, int initial, IfCondition cond = kCondGE) argument
[all...]
/art/runtime/base/
arena_object.h
31 void* operator new(size_t size, ArenaAllocator* allocator) { argument
32 return allocator->Alloc(size, kAllocKind);
55 void* operator new(size_t size, ArenaAllocator* allocator) { argument
56 return allocator->Alloc(size, kAllocKind);
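
arena_object.h gives IR classes a class-scoped operator new taking the ArenaAllocator, which is what makes the `new (allocator) HBasicBlock(graph)` expressions seen throughout these results work. A minimal sketch of that mixin follows; Arena and Node are illustrative, and the real ArenaObject is additionally templated on an allocation-kind tag used for memory accounting.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy arena; illustrative stand-in for ArenaAllocator.
    class Arena {
     public:
      void* Alloc(size_t bytes) {
        storage_.push_back(std::vector<uint8_t>(bytes));
        return storage_.back().data();
      }
     private:
      std::vector<std::vector<uint8_t>> storage_;
    };

    // Mixin in the spirit of ArenaObject: deriving from it lets a class be
    // created with `new (arena) T(...)` and forbids ordinary delete, because
    // arena memory is released wholesale, never per object.
    class ArenaObject {
     public:
      static void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }
      static void operator delete(void*) = delete;
    };

    class Node : public ArenaObject {
     public:
      explicit Node(int value) : value_(value) {}
      int value() const { return value_; }
     private:
      int value_;
    };

    int main() {
      Arena arena;
      Node* node = new (&arena) Node(42);  // Placement new routes through the arena.
      return node->value() == 42 ? 0 : 1;  // No delete: the arena owns the memory.
    }
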
scoped_arena_allocator.h
126 // Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
129 // objects and allows nesting multiple allocators. Only the top allocator can be used but
136 // the allocator is not exactly a C++ block scope. For example, an optimization
137 // pass can create the scoped allocator in Start() and destroy it in End().
140 ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack); local
141 allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
142 return allocator;
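
The comments above capture the key property of ScopedArenaAllocator: allocators nest on a shared ArenaStack, only the most recently created one should allocate, and destroying it rewinds the stack to the mark recorded at construction, so an optimization pass can create one in Start() and drop all temporary memory in End(). The sketch below models just that mark/rewind behaviour with simplified stand-ins (ArenaStack, ScopedArena); the nesting rule is documented, not enforced.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Shared backing stack; illustrative stand-in for ArenaStack.
    class ArenaStack {
     public:
      explicit ArenaStack(size_t capacity) : buffer_(capacity), top_(0) {}
      void* Alloc(size_t bytes) {
        assert(top_ + bytes <= buffer_.size());
        void* result = buffer_.data() + top_;
        top_ += bytes;
        return result;
      }
      size_t Mark() const { return top_; }
      void Rewind(size_t mark) { top_ = mark; }
     private:
      std::vector<uint8_t> buffer_;
      size_t top_;
    };

    // In the spirit of ScopedArenaAllocator: records the stack position on
    // construction and rewinds to it on destruction, so nested allocators
    // release their memory in strict LIFO order. Memory is NOT zeroed.
    class ScopedArena {
     public:
      explicit ScopedArena(ArenaStack* stack) : stack_(stack), mark_(stack->Mark()) {}
      ~ScopedArena() { stack_->Rewind(mark_); }
      void* Alloc(size_t bytes) { return stack_->Alloc(bytes); }
     private:
      ArenaStack* stack_;
      size_t mark_;
    };

    int main() {
      ArenaStack stack(1 << 20);
      ScopedArena pass_allocator(&stack);  // Lives for the whole pass.
      void* a = pass_allocator.Alloc(128);
      {
        ScopedArena temp(&stack);          // Nested: only the top allocator
        void* b = temp.Alloc(256);         // should allocate at any time.
        (void)b;
      }                                    // temp's 256 bytes are reclaimed here.
      (void)a;
      return 0;
    }
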
bit_vector.cc
22 #include "allocator.h"
28 Allocator* allocator,
33 allocator_(allocator),
43 Allocator* allocator)
45 allocator,
47 static_cast<uint32_t*>(allocator->Alloc(BitsToWords(start_bits) * kWordBytes))) {
53 Allocator* allocator)
55 allocator,
57 static_cast<uint32_t*>(allocator->Alloc(src.storage_size_ * kWordBytes))) {
27 BitVector(bool expandable, Allocator* allocator, uint32_t storage_size, uint32_t* storage) argument
41 BitVector(uint32_t start_bits, bool expandable, Allocator* allocator) argument
51 BitVector(const BitVector& src, bool expandable, Allocator* allocator) argument
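
BitVector never allocates its word storage directly: it is handed an Allocator and sizes the backing array with BitsToWords, which is why both constructors above funnel through allocator->Alloc(...). Below is a condensed sketch of that arrangement, with Allocator, MallocAllocator and BitVector as simplified stand-ins rather than the art:: classes.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    // Pluggable allocator interface, in the spirit of the injected Allocator.
    class Allocator {
     public:
      virtual ~Allocator() = default;
      virtual void* Alloc(size_t size) = 0;
      virtual void Free(void* p) = 0;
    };

    class MallocAllocator : public Allocator {
     public:
      void* Alloc(size_t size) override { return std::calloc(1, size); }  // Zero-filled.
      void Free(void* p) override { std::free(p); }
    };

    // Simplified bit vector whose word storage comes from the injected
    // allocator, mirroring BitVector(start_bits, expandable, allocator).
    class BitVector {
     public:
      static constexpr uint32_t kWordBits = 32;
      static uint32_t BitsToWords(uint32_t bits) { return (bits + kWordBits - 1) / kWordBits; }

      BitVector(uint32_t start_bits, Allocator* allocator)
          : allocator_(allocator),
            storage_size_(BitsToWords(start_bits)),
            storage_(static_cast<uint32_t*>(allocator->Alloc(storage_size_ * sizeof(uint32_t)))) {}

      ~BitVector() { allocator_->Free(storage_); }

      void SetBit(uint32_t idx) { storage_[idx / kWordBits] |= 1u << (idx % kWordBits); }
      bool IsBitSet(uint32_t idx) const {
        return (storage_[idx / kWordBits] & (1u << (idx % kWordBits))) != 0;
      }

     private:
      Allocator* allocator_;
      uint32_t storage_size_;  // In words.
      uint32_t* storage_;
    };
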
/art/runtime/gc/allocator/
dlmalloc.h
57 namespace allocator { namespace in namespace:art::gc
63 } // namespace allocator
rosalloc-inl.h
24 namespace allocator { namespace in namespace:art::gc
122 } // namespace allocator
/art/runtime/gc/
allocator_type.h
27 kAllocatorTypeBumpPointer, // Use BumpPointer allocator, has entrypoints.
28 kAllocatorTypeTLAB, // Use TLAB allocator, has entrypoints.
29 kAllocatorTypeRosAlloc, // Use RosAlloc allocator, has entrypoints.
30 kAllocatorTypeDlMalloc, // Use dlmalloc allocator, has entrypoints.
31 kAllocatorTypeNonMoving, // Special allocator for non moving objects, doesn't have entrypoints.
38 inline constexpr bool IsTLABAllocator(AllocatorType allocator) { argument
39 return allocator == kAllocatorTypeTLAB || allocator == kAllocatorTypeRegionTLAB;
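
allocator_type.h is essentially an enum of the heap's allocation strategies plus small constexpr predicates such as IsTLABAllocator that the allocation fast path can evaluate cheaply. Restated as a self-contained unit (comments paraphrase the ones above; kAllocatorTypeRegionTLAB is included because the predicate refers to it, and the real enum has further members not shown here):

    // Simplified mirror of AllocatorType and IsTLABAllocator.
    enum AllocatorType {
      kAllocatorTypeBumpPointer,  // Bump-pointer space, has fast-path entrypoints.
      kAllocatorTypeTLAB,         // Thread-local allocation buffer.
      kAllocatorTypeRosAlloc,     // RosAlloc runs-of-slots allocator.
      kAllocatorTypeDlMalloc,     // dlmalloc-backed space.
      kAllocatorTypeNonMoving,    // Non-moving objects; no fast-path entrypoints.
      kAllocatorTypeRegionTLAB,   // TLAB carved out of a region space.
    };

    inline constexpr bool IsTLABAllocator(AllocatorType allocator) {
      return allocator == kAllocatorTypeTLAB || allocator == kAllocatorTypeRegionTLAB;
    }

    static_assert(IsTLABAllocator(kAllocatorTypeTLAB), "TLAB allocators are TLAB");
    static_assert(!IsTLABAllocator(kAllocatorTypeRosAlloc), "RosAlloc is not a TLAB allocator");
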
heap-inl.h
46 AllocatorType allocator,
60 // Need to check that we aren't the large object allocator since the large object allocation code
80 if (IsTLABAllocator(allocator)) {
84 if (IsTLABAllocator(allocator) && byte_count <= self->TlabSize()) {
96 !kInstrumented && allocator == kAllocatorTypeRosAlloc &&
110 obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
114 // or changes the allocator in a suspend point here, we need to retry the allocation.
116 allocator,
124 // allocator or instrumentation changed.
126 // AllocObject will pick up the new allocator typ
43 AllocObjectWithAllocator(Thread* self, ObjPtr<mirror::Class> klass, size_t byte_count, AllocatorType allocator, const PreFenceVisitor& pre_fence_visitor) argument
[all...]
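
AllocObjectWithAllocator's fast path checks IsTLABAllocator and the remaining TLAB size, and only falls back to the slow path, which may refill the TLAB, run the collector, or switch allocators at a suspend point and then retry, when the bump allocation cannot be satisfied. A stripped-down sketch of that split follows; Tlab, AllocWithTlab and SlowPathAllocate are assumed names, and the real code also handles instrumentation, large objects and pre-fence visitors.

    #include <cstddef>
    #include <cstdint>

    // Per-thread bump-pointer buffer; illustrative stand-in for the thread's TLAB state.
    struct Tlab {
      uint8_t* pos = nullptr;
      uint8_t* end = nullptr;
      size_t Size() const { return static_cast<size_t>(end - pos); }
    };

    // Placeholder slow path: a real implementation would refill the TLAB from
    // the heap, possibly run the collector, or switch allocators, then retry;
    // here it simply reports failure.
    inline void* SlowPathAllocate(Tlab* /*tlab*/, size_t /*byte_count*/) { return nullptr; }

    // Shape of the fast path: if the request fits in the thread-local buffer,
    // bump the pointer with no locking; otherwise defer to the slow path.
    // After a suspend point the allocator may have changed, so callers must be
    // prepared for the slow path to satisfy the request differently.
    inline void* AllocWithTlab(Tlab* tlab, size_t byte_count) {
      if (byte_count <= tlab->Size()) {
        void* obj = tlab->pos;
        tlab->pos += byte_count;
        return obj;
      }
      return SlowPathAllocate(tlab, byte_count);
    }
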
/art/runtime/entrypoints/quick/
quick_alloc_entrypoints.cc
194 void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) { argument
195 entry_points_allocator = allocator;
/art/test/130-hprof/src/
Main.java
48 Class<?> allocator = loader.loadClass("Allocator");
49 return allocator.getDeclaredMethod("allocObject", null).invoke(null);
135 Allocator allocator = new Allocator();
136 Dumper dumper = new Dumper(allocator);
137 allocator.start();
140 allocator.join();
164 Dumper(Allocator allocator) { argument
165 this.allocator = allocator;
167 Allocator allocator; field in class:Main.Dumper
[all...]
/art/runtime/gc/space/
malloc_space.cc
199 void* allocator = CreateAllocator(End(), starting_size_, initial_size_, capacity, local
206 *out_malloc_space = CreateInstance(mem_map.release(), alloc_space_name, allocator, End(), end,
/art/runtime/openjdkjvmti/
jvmti_weak_table-inl.h
252 : allocator(alloc),
253 data(reserve > 0 ? allocator.allocate(reserve) : nullptr),
260 allocator.deallocate(data, capacity);
279 Storage* tmp = allocator.allocate(new_capacity);
286 allocator.deallocate(old, capacity);
298 Allocator allocator; member in struct:openjdkjvmti::JvmtiWeakTable::ReleasableContainer
343 JvmtiAllocator<void> allocator(jvmti_env);
344 ReleasableContainer<jobject, JvmtiAllocator<jobject>> selected_objects(allocator,
346 ReleasableContainer<T, JvmtiAllocator<T>> selected_tags(allocator, initial_tag_size);
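
ReleasableContainer is a grow-by-doubling buffer that takes every byte from the JvmtiAllocator it is given, so the memory is attributed to the agent's JVMTI environment, and that can release the raw array to the caller. Below is a simplified sketch of the same pattern against any standard-style allocator; ReleasableBuffer and its members are assumed names, and element types are restricted to trivially copyable ones (like the jobject handles above) to keep the sketch short.

    #include <cstddef>
    #include <memory>
    #include <new>
    #include <type_traits>

    template <typename T, typename Allocator = std::allocator<T>>
    class ReleasableBuffer {
      static_assert(std::is_trivially_copyable<T>::value,
                    "sketch only supports trivially copyable elements");
     public:
      explicit ReleasableBuffer(const Allocator& alloc, size_t reserve = 0)
          : allocator_(alloc),
            data_(reserve > 0 ? allocator_.allocate(reserve) : nullptr),
            size_(0),
            capacity_(reserve) {}

      ~ReleasableBuffer() {
        if (data_ != nullptr) {
          allocator_.deallocate(data_, capacity_);
        }
      }

      // Append, doubling the allocation when it is full.
      void Push(const T& value) {
        if (size_ == capacity_) {
          Grow(capacity_ == 0 ? 4 : capacity_ * 2);
        }
        new (data_ + size_) T(value);
        ++size_;
      }

      // Hands the raw array to the caller, who becomes responsible for
      // deallocating it, as the JVMTI code does when returning results.
      T* Release(size_t* out_size) {
        T* result = data_;
        *out_size = size_;
        data_ = nullptr;
        size_ = 0;
        capacity_ = 0;
        return result;
      }

     private:
      void Grow(size_t new_capacity) {
        T* tmp = allocator_.allocate(new_capacity);
        for (size_t i = 0; i < size_; ++i) {
          new (tmp + i) T(data_[i]);  // Trivially copyable: no destructor calls needed.
        }
        if (data_ != nullptr) {
          allocator_.deallocate(data_, capacity_);
        }
        data_ = tmp;
        capacity_ = new_capacity;
      }

      Allocator allocator_;
      T* data_;
      size_t size_;
      size_t capacity_;
    };
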
