Searched refs:arena (Results 1 - 25 of 35) sorted by relevance

/art/compiler/utils/
arena_bit_vector.cc
25 explicit ArenaBitVectorAllocator(ArenaAlloc* arena) : arena_(arena) {} argument
34 static void* operator new(size_t size, ArenaAlloc* arena) { argument
35 return arena->Alloc(sizeof(ArenaBitVectorAllocator), kArenaAllocGrowableBitMap);
44 ArenaBitVector::ArenaBitVector(ArenaAllocator* arena, unsigned int start_bits, argument
47 new (arena) ArenaBitVectorAllocator<ArenaAllocator>(arena)), kind_(kind) {
51 ArenaBitVector::ArenaBitVector(ScopedArenaAllocator* arena, unsigned int start_bits, argument
54 new (arena) ArenaBitVectorAllocator<ScopedArenaAllocator>(arena)), kind
[all...]
arena_bit_vector.h
55 ArenaBitVector(ArenaAllocator* arena, uint32_t start_bits, bool expandable,
57 ArenaBitVector(ScopedArenaAllocator* arena, uint32_t start_bits, bool expandable,
61 static void* operator new(size_t size, ArenaAllocator* arena) { argument
62 return arena->Alloc(sizeof(ArenaBitVector), kArenaAllocGrowableBitMap);
64 static void* operator new(size_t size, ScopedArenaAllocator* arena) { argument
65 return arena->Alloc(sizeof(ArenaBitVector), kArenaAllocGrowableBitMap);
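Note: the arena_bit_vector.h hits above show the recurring ART idiom of a class-scoped placement operator new that forwards to ArenaAllocator::Alloc, so that new (arena) ArenaBitVector(...) places the object itself inside the arena. Below is a minimal standalone sketch of that idiom; SimpleArena and ArenaBackedBitVector are hypothetical stand-ins for illustration, not the real ART classes.

// Minimal sketch of the arena placement-new idiom (hypothetical classes,
// not ART's ArenaAllocator/ArenaBitVector).
#include <stddef.h>
#include <stdint.h>
#include <vector>

class SimpleArena {
 public:
  // Bump-allocate from one fixed block; a real arena would chain blocks.
  void* Alloc(size_t bytes) {
    size_t aligned = (bytes + 7) & ~size_t{7};
    if (used_ + aligned > buffer_.size()) return nullptr;  // out of space
    void* p = buffer_.data() + used_;
    used_ += aligned;
    return p;
  }

 private:
  std::vector<uint8_t> buffer_ = std::vector<uint8_t>(64 * 1024);
  size_t used_ = 0;
};

class ArenaBackedBitVector {
 public:
  // Class-scoped placement form: new (arena) ArenaBackedBitVector(...)
  // allocates the object itself inside the arena, mirroring the
  // ArenaBitVector::operator new overloads above.
  static void* operator new(size_t size, SimpleArena* arena) {
    return arena->Alloc(size);
  }
  // Matching placement delete; only called if the constructor throws.
  static void operator delete(void*, SimpleArena*) {}

  ArenaBackedBitVector(SimpleArena* arena, uint32_t start_bits)
      : storage_(static_cast<uint32_t*>(
            arena->Alloc(WordCount(start_bits) * sizeof(uint32_t)))),
        num_bits_(start_bits) {
    for (uint32_t i = 0; i < WordCount(num_bits_); ++i) storage_[i] = 0u;
  }

  void SetBit(uint32_t idx) { storage_[idx / 32] |= 1u << (idx % 32); }
  bool IsBitSet(uint32_t idx) const { return (storage_[idx / 32] >> (idx % 32)) & 1u; }

 private:
  static uint32_t WordCount(uint32_t bits) { return (bits + 31) / 32; }

  uint32_t* storage_;
  uint32_t num_bits_;
};

int main() {
  SimpleArena arena;
  // Object and its bit storage both live in the arena; nothing is freed
  // individually, everything goes away with the arena itself.
  ArenaBackedBitVector* bv = new (&arena) ArenaBackedBitVector(&arena, 10);
  bv->SetBit(3);
  return bv->IsBitSet(3) ? 0 : 1;
}

The kArenaAllocGrowableBitMap tag passed to Alloc in the real code is omitted here; it appears to serve per-category allocation accounting rather than correctness.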
arena_allocator_test.cc
25 ArenaAllocator arena(&pool);
26 ArenaBitVector bv(&arena, 10, true);
arena_allocator.cc
88 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
89 malloc_bytes += arena->Size();
90 lost_bytes += arena->RemainingSpace();
93 // The lost_bytes_adjustment is used to make up for the fact that the current arena
120 map_ = MemMap::MapAnonymous("dalvik-arena", NULL, size, PROT_READ | PROT_WRITE, false,
157 auto* arena = free_arenas_; local
159 delete arena;
[all...]
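Note: the arena_allocator.cc hit at lines 88-90 is the memory accounting walk: it iterates the chain of arenas behind an allocator, summing Size() and RemainingSpace() to report how much was mapped versus how much is stranded at the tail of each block (the lost_bytes_adjustment mentioned at line 93 compensates for the still-open current arena). A simplified sketch of that walk, using a hypothetical Block node rather than ART's Arena class:

// Sketch of the chained-arena accounting loop (hypothetical Block type).
#include <stddef.h>
#include <stdio.h>

struct Block {
  size_t size;        // bytes backing this block
  size_t bytes_used;  // bytes already handed out from this block
  Block* next;        // next block in the chain, or nullptr

  size_t Size() const { return size; }
  size_t RemainingSpace() const { return size - bytes_used; }
};

// Walk the chain, totalling mapped bytes and the space left unused in each
// block; a real implementation would subtract the current (still growing)
// block's tail from lost_bytes, as the lost_bytes_adjustment comment hints.
size_t TotalMapped(const Block* first, size_t* lost_bytes) {
  size_t malloc_bytes = 0;
  *lost_bytes = 0;
  for (const Block* block = first; block != nullptr; block = block->next) {
    malloc_bytes += block->Size();
    *lost_bytes += block->RemainingSpace();
  }
  return malloc_bytes;
}

int main() {
  Block second = {4096, 100, nullptr};   // current block, mostly empty
  Block first = {4096, 4000, &second};   // mostly full retired block
  size_t lost = 0;
  size_t mapped = TotalMapped(&first, &lost);
  printf("mapped=%zu lost=%zu\n", mapped, lost);  // mapped=8192 lost=4092
  return 0;
}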
growable_array.h
84 GrowableArray(ArenaAllocator* arena, size_t init_length, OatListKind kind = kGrowableArrayMisc) argument
85 : arena_(arena),
202 static void* operator new(size_t size, ArenaAllocator* arena) { argument
203 return arena->Alloc(sizeof(GrowableArray<T>), kArenaAllocGrowableArray);
/art/compiler/dex/
backend.h
51 explicit Backend(ArenaAllocator* arena) : arena_(arena) {} argument
mir_optimization_test.cc
121 bb->successor_blocks = new (&cu_.arena) GrowableArray<SuccessorBlockInfo*>(
122 &cu_.arena, def->num_successors, kGrowableArraySuccessorBlocks);
125 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo),
132 bb->predecessors = new (&cu_.arena) GrowableArray<BasicBlockId>(
133 &cu_.arena, def->num_predecessors, kGrowableArrayPredecessors);
140 cu_.arena.Alloc(sizeof(BasicBlockDataFlow), kArenaAllocDFInfo));
158 mirs_ = reinterpret_cast<MIR*>(cu_.arena.Alloc(sizeof(MIR) * count, kArenaAllocMIR));
179 cu_.arena.Alloc(sizeof(DexFile::CodeItem), kArenaAllocMisc));
212 cu_.mir_graph.reset(new MIRGraph(&cu_, &cu_.arena));
411 check_bb->successor_blocks = new (&cu_.arena) GrowableArra
[all...]
compiler_ir.h
85 ArenaAllocator arena; member in struct:art::CompilationUnit
mir_graph_test.cc
78 bb->successor_blocks = new (&cu_.arena) GrowableArray<SuccessorBlockInfo*>(
79 &cu_.arena, def->num_successors, kGrowableArraySuccessorBlocks);
82 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo),
89 bb->predecessors = new (&cu_.arena) GrowableArray<BasicBlockId>(
90 &cu_.arena, def->num_predecessors, kGrowableArrayPredecessors);
97 cu_.arena.Alloc(sizeof(BasicBlockDataFlow), kArenaAllocDFInfo));
157 cu_.mir_graph.reset(new MIRGraph(&cu_, &cu_.arena));
mir_graph.cc
70 MIRGraph::MIRGraph(CompilationUnit* cu, ArenaAllocator* arena) argument
72 block_id_map_(std::less<unsigned int>(), arena->Adapter()),
80 use_counts_(arena, 256, kGrowableArrayMisc),
81 raw_use_counts_(arena, 256, kGrowableArrayMisc),
98 block_list_(arena, 100, kGrowableArrayBlockList),
104 dex_pc_to_block_map_(arena, 0, kGrowableArrayMisc),
105 m_units_(arena->Adapter()),
106 method_stack_(arena->Adapter()),
112 extended_basic_blocks_(arena->Adapter()),
116 arena_(arena),
1876 ArenaAllocator* arena = mir_graph->GetArena(); local
[all...]
frontend.cc
502 arena(pool),
705 cu.mir_graph.reset(new MIRGraph(&cu, &cu.arena));
759 /* Free Arenas from the cu.arena_stack for reuse by the cu.arena in the codegen. */
789 if (cu.arena.BytesAllocated() > (1 * 1024 *1024)) {
790 MemStats mem_stats(cu.arena.GetMemStats());
796 LOG(INFO) << "MEMINFO " << cu.arena.BytesAllocated() << " " << cu.mir_graph->GetNumBlocks()
global_value_numbering_test.cc
195 bb->successor_blocks = new (&cu_.arena) GrowableArray<SuccessorBlockInfo*>(
196 &cu_.arena, def->num_successors, kGrowableArraySuccessorBlocks);
199 static_cast<SuccessorBlockInfo*>(cu_.arena.Alloc(sizeof(SuccessorBlockInfo),
206 bb->predecessors = new (&cu_.arena) GrowableArray<BasicBlockId>(
207 &cu_.arena, def->num_predecessors, kGrowableArrayPredecessors);
214 cu_.arena.Alloc(sizeof(BasicBlockDataFlow), kArenaAllocDFInfo));
233 mirs_ = reinterpret_cast<MIR*>(cu_.arena.Alloc(sizeof(MIR) * count, kArenaAllocMIR));
338 live_in_v_(new (&cu_.arena) ArenaBitVector(&cu_.arena, kMaxSsaRegs, false, kBitMapMisc)) {
339 cu_.mir_graph.reset(new MIRGraph(&cu_, &cu_.arena));
[all...]
/art/compiler/optimizing/
find_loops_test.cc
46 ArenaPool arena; local
47 HGraph* graph = TestCode(data, &arena);
58 ArenaPool arena; local
59 HGraph* graph = TestCode(data, &arena);
73 ArenaPool arena; local
74 HGraph* graph = TestCode(data, &arena);
89 ArenaPool arena; local
90 HGraph* graph = TestCode(data, &arena);
103 ArenaPool arena; local
104 HGraph* graph = TestCode(data, &arena);
148 ArenaPool arena; local
175 ArenaPool arena; local
199 ArenaPool arena; local
224 ArenaPool arena; local
250 ArenaPool arena; local
275 ArenaPool arena; local
306 ArenaPool arena; local
335 ArenaPool arena; local
349 ArenaPool arena; local
[all...]
stack_map_test.cc
36 ArenaAllocator arena(&pool);
37 StackMapStream<size_t> stream(&arena);
39 ArenaBitVector sp_mask(&arena, 0, false);
45 void* memory = arena.Alloc(size, kArenaAllocMisc);
73 ArenaAllocator arena(&pool);
74 StackMapStream<size_t> stream(&arena);
76 ArenaBitVector sp_mask1(&arena, 0, true);
85 ArenaBitVector sp_mask2(&arena, 0, true);
92 void* memory = arena.Alloc(size, kArenaAllocMisc);
builder.h
34 HGraphBuilder(ArenaAllocator* arena, argument
38 : arena_(arena),
39 branch_targets_(arena, 0),
40 locals_(arena, 0),
codegen_test.cc
71 ArenaAllocator arena(&pool);
72 HGraphBuilder builder(&arena);
78 CodeGenerator* codegen = CodeGenerator::Create(&arena, graph, kX86);
86 codegen = CodeGenerator::Create(&arena, graph, kArm);
92 codegen = CodeGenerator::Create(&arena, graph, kX86_64);
optimizing_compiler.cc
105 ArenaAllocator arena(&pool);
106 HGraphBuilder builder(&arena, &dex_compilation_unit, &dex_file, GetCompilerDriver());
116 CodeGenerator* codegen = CodeGenerator::Create(&arena, graph, instruction_set);
nodes.h
73 explicit HGraph(ArenaAllocator* arena) argument
74 : arena_(arena),
75 blocks_(arena, kDefaultNumberOfBlocks),
76 reverse_post_order_(arena, kDefaultNumberOfBlocks),
641 HEnvironment(ArenaAllocator* arena, size_t number_of_vregs) : vregs_(arena, number_of_vregs) { argument
1130 HInvoke(ArenaAllocator* arena, argument
1134 : inputs_(arena, number_of_arguments),
1170 HInvokeStatic(ArenaAllocator* arena, argument
1175 : HInvoke(arena, number_of_argument
1267 HPhi(ArenaAllocator* arena, uint32_t reg_number, size_t number_of_inputs, Primitive::Type type) argument
1531 HParallelMove(ArenaAllocator* arena) argument
[all...]
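Note: the nodes.h hits illustrate how the optimizing compiler threads one ArenaAllocator through HGraph and everything it owns: the graph's containers (blocks_, reverse_post_order_), every node's inputs_ and vregs_, and so on all draw from the same arena (the MIRGraph hits above show the same pattern via arena->Adapter()), so the whole graph is typically released in one shot when compilation of the method finishes. A standalone sketch of that ownership shape, using C++17 std::pmr::monotonic_buffer_resource as a stand-in for the arena; Graph and Node are illustrative names, not the real HGraph/HInstruction API.

// Sketch: one arena threaded through a graph and all of its nodes/containers.
#include <memory_resource>
#include <new>
#include <vector>

class Node;

class Graph {
 public:
  explicit Graph(std::pmr::memory_resource* arena)
      : arena_(arena), blocks_(arena) {}  // container storage comes from the arena

  Node* AddNode();

 private:
  std::pmr::memory_resource* arena_;
  std::pmr::vector<Node*> blocks_;
};

class Node {
 public:
  explicit Node(std::pmr::memory_resource* arena) : inputs_(arena) {}
  void AddInput(Node* input) { inputs_.push_back(input); }

 private:
  std::pmr::vector<Node*> inputs_;  // per-node container is also arena-backed
};

Node* Graph::AddNode() {
  // Allocate the node itself from the arena, as new (arena) HSomething(...)
  // does in ART; there is no matching delete, the arena reclaims everything.
  void* mem = arena_->allocate(sizeof(Node), alignof(Node));
  Node* node = new (mem) Node(arena_);
  blocks_.push_back(node);
  return node;
}

int main() {
  std::pmr::monotonic_buffer_resource arena(64 * 1024);
  Graph graph(&arena);
  Node* a = graph.AddNode();
  Node* b = graph.AddNode();
  b->AddInput(a);
  // Graph, nodes and all their containers are released when `arena` unwinds.
  return 0;
}

The trade-off is the usual arena one: node destructors never run, so node types must not own anything that lives outside the arena.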
/art/compiler/
compilers.cc
105 mir_to_lir = ArmCodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
108 mir_to_lir = Arm64CodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
111 mir_to_lir = MipsCodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
116 mir_to_lir = X86CodeGenerator(cu, cu->mir_graph.get(), &cu->arena);
compiler.cc
150 cu, cu->mir_graph.get(), &cu->arena,
/art/compiler/dex/portable/
mir_to_gbc.h
47 ArenaAllocator* const arena,
53 MirConverter(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena, argument
55 : Backend(arena),
69 llvm_values_(arena, mir_graph->GetNumSSARegs()),
/art/compiler/dex/quick/
mir_to_lir.h
192 ArenaAllocator* const arena);
194 ArenaAllocator* const arena);
196 ArenaAllocator* const arena);
198 ArenaAllocator* const arena);
343 static void* operator new(size_t size, ArenaAllocator* arena) { argument
344 return arena->Alloc(size, kArenaAllocRegAlloc);
443 RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena,
455 static void* operator new(size_t size, ArenaAllocator* arena) { argument
456 return arena->Alloc(size, kArenaAllocRegAlloc);
531 static void* operator new(size_t size, ArenaAllocator* arena) { argument
[all...]
ralloc_util.cc
58 Mir2Lir::RegisterPool::RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena, argument
69 core_regs_(arena, core_regs.size()), next_core_reg_(0),
70 core64_regs_(arena, core64_regs.size()), next_core64_reg_(0),
71 sp_regs_(arena, sp_regs.size()), next_sp_reg_(0),
72 dp_regs_(arena, dp_regs.size()), next_dp_reg_(0), m2l_(m2l) {
86 RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
91 RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
96 RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
101 RegisterInfo* info = new (arena) RegisterInfo(reg, m2l_->GetRegMaskCommon(reg));
129 RegisterInfo* invalid_reg = new (arena) RegisterInf
[all...]
codegen_util.cc
990 Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
991 : Backend(arena),
1000 switch_tables_(arena, 4, kGrowableArraySwitchTables),
1001 fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
1002 tempreg_info_(arena, 20, kGrowableArrayMisc),
1003 reginfo_map_(arena, RegStorage::kMaxRegs, kGrowableArrayMisc),
1004 pointer_storage_(arena, 128, kGrowableArrayMisc),
1022 slow_paths_(arena, 32, kGrowableArraySlowPaths),
1024 mask_cache_(arena) {
/art/compiler/dex/quick/mips/
target_mips.cc
574 MipsMir2Lir::MipsMir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
575 : Mir2Lir(cu, mir_graph, arena) {
586 ArenaAllocator* const arena) {
587 return new MipsMir2Lir(cu, mir_graph, arena);
585 MipsCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph, ArenaAllocator* const arena) argument
