/art/compiler/dex/ |
H A D | backend.h | 51 explicit Backend(ArenaAllocator* arena) : arena_(arena) {} 52 ArenaAllocator* const arena_; member in class:art::Backend
|
H A D | ssa_transformation.cc | 97 dfs_order_ = new (arena_) GrowableArray<BasicBlockId>(arena_, GetNumBlocks(), 106 dfs_post_order_ = new (arena_) GrowableArray<BasicBlockId>(arena_, GetNumBlocks(), 152 (arena_->Alloc(sizeof(ArenaBitVector *) * num_registers, 159 new (arena_) ArenaBitVector(arena_, GetNumBlocks(), false, kBitMapBMatrix); 185 new (arena_) GrowableArray<BasicBlockId>(arena_, num_reachable_blocks_, 276 bb->dominators = new (arena_) ArenaBitVector [all...] |
H A D | mir_dataflow.cc | 933 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapUse); 935 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapDef); 937 new (arena_) ArenaBitVector(arena_, cu_->num_dalvik_registers, false, kBitMapLiveIn); 1021 mir->ssa_rep->uses = static_cast<int*>(arena_->Alloc(sizeof(int) * num_uses, kArenaAllocDFInfo)); 1023 mir->ssa_rep->fp_use = static_cast<bool*>(arena_->Alloc(sizeof(bool) * num_uses, kArenaAllocDFInfo)); 1031 mir->ssa_rep->defs = static_cast<int*>(arena_->Alloc(sizeof(int) * num_defs, 1033 mir->ssa_rep->fp_def = static_cast<bool*>(arena_ [all...] |
H A D | mir_graph.cc | 116 arena_(arena), 128 try_block_addr_ = new (arena_) ArenaBitVector(arena_, 0, true /* expandable */); 517 new (arena_) GrowableArray<SuccessorBlockInfo*>(arena_, size, kGrowableArraySuccessorBlocks); 523 static_cast<SuccessorBlockInfo*>(arena_->Alloc(sizeof(SuccessorBlockInfo), 570 cur_block->successor_blocks = new (arena_) GrowableArray<SuccessorBlockInfo*>( 571 arena_, 2, kGrowableArraySuccessorBlocks); 578 (arena_->Alloc(sizeof(SuccessorBlockInfo), kArenaAllocSuccessor)); 1340 ret = static_cast<char*>(arena_ [all...] |
H A D | mir_optimization.cc | 257 CompilerTemp *compiler_temp = static_cast<CompilerTemp *>(arena_->Alloc(sizeof(CompilerTemp), 281 static_cast<CompilerTemp *>(arena_->Alloc(sizeof(CompilerTemp), kArenaAllocRegAlloc)); 506 static_cast<int*>(arena_->Alloc(sizeof(int) * 3, kArenaAllocDFInfo)); 515 static_cast<int*>(arena_->Alloc(sizeof(int) * 1, kArenaAllocDFInfo)); 517 static_cast<bool*>(arena_->Alloc(sizeof(bool) * 1, kArenaAllocDFInfo)); 521 static_cast<bool*>(arena_->Alloc(sizeof(bool) * mir->ssa_rep->num_uses, 1304 static_cast<Checkstats*>(arena_->Alloc(sizeof(Checkstats), kArenaAllocDFInfo));
|
H A D | mir_graph.h | 625 return arena_; 629 opcode_count_ = static_cast<int*>(arena_->Alloc(kNumPackedOpcodes * sizeof(int), 1189 ArenaAllocator* arena_; member in class:art::MIRGraph
|
H A D | vreg_analysis.cc | 441 RegLocation* loc = static_cast<RegLocation*>(arena_->Alloc(max_regs * sizeof(*loc),
|
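Taken together, the dex/ hits above show the two arena idioms the MIR-level code relies on: plain arrays (SSA uses/defs, dominator bit-vector storage) come from arena_->Alloc(bytes, kind) followed by a static_cast, while objects such as GrowableArray and ArenaBitVector are created with placement new on the arena. The second argument at these call sites (kArenaAllocDFInfo, kArenaAllocGrowableBitMap, ...) tags each allocation so memory usage can be broken down per category. Below is a minimal, compilable sketch of both idioms; the Arena class, AllocKind enum, and Node type are simplified stand-ins invented for illustration, not the real art::ArenaAllocator API.

#include <cstddef>
#include <cstdint>
#include <vector>

// Simplified stand-in for art::ArenaAllocator: bump-style ownership of every
// block, plus a per-kind byte counter in the spirit of the kArenaAlloc* tags
// seen at the call sites above. Invented for illustration only.
enum AllocKind { kAllocDFInfo, kAllocGrowableBitMap, kAllocMisc, kNumAllocKinds };

class Arena {
 public:
  Arena() : bytes_by_kind_(kNumAllocKinds, 0) {}
  ~Arena() { for (uint8_t* b : blocks_) delete[] b; }

  void* Alloc(size_t bytes, AllocKind kind) {
    bytes_by_kind_[kind] += bytes;          // The kind tag exists for memory accounting.
    blocks_.push_back(new uint8_t[bytes]);  // Owned by the arena; freed only all at once.
    return blocks_.back();
  }

 private:
  std::vector<uint8_t*> blocks_;
  std::vector<size_t> bytes_by_kind_;
};

// An object type that opts into placement new on the arena, mirroring
// "new (arena_) ArenaBitVector(arena_, ...)" above.
struct Node {
  explicit Node(int id_in) : id(id_in) {}
  static void* operator new(size_t size, Arena* arena) {
    return arena->Alloc(size, kAllocMisc);
  }
  static void operator delete(void*, Arena*) {}  // Used only if the constructor throws.
  int id;
};

int main() {
  Arena arena;
  // Idiom 1: raw Alloc + static_cast for plain arrays (SSA uses/defs, etc.).
  int* uses = static_cast<int*>(arena.Alloc(sizeof(int) * 4, kAllocDFInfo));
  uses[0] = 7;
  // Idiom 2: placement new for objects; no matching delete is ever written.
  Node* node = new (&arena) Node(42);
  return (node->id == 42 && uses[0] == 7) ? 0 : 1;
}

Nothing allocated this way is freed individually; the whole arena is released when compilation of the method finishes, which is the point of the pattern.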
/art/compiler/utils/ |
H A D | arena_bit_vector.cc | 25 explicit ArenaBitVectorAllocator(ArenaAlloc* arena) : arena_(arena) {} 29 return arena_->Alloc(size, kArenaAllocGrowableBitMap); 40 ArenaAlloc* arena_; member in class:art::ArenaBitVectorAllocator
|
H A D | growable_array.h | 85 : arena_(arena), 89 elem_list_ = static_cast<T*>(arena_->Alloc(sizeof(T) * init_length, 103 T* new_array = static_cast<T*>(arena_->Alloc(sizeof(T) * target_length, 208 ArenaAllocator* const arena_; member in class:art::GrowableArray
|
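The growable_array.h hits illustrate how a container lives on top of the arena: the element buffer itself comes from arena_->Alloc, and growing the array just allocates a bigger block and copies, leaving the old block for the arena to reclaim in bulk. A small sketch of that resize-without-free behavior, again using an invented Arena stand-in (kind tag omitted) rather than the real GrowableArray/ArenaAllocator; it assumes trivially copyable element types, which is how GrowableArray is used in these files.

#include <algorithm>
#include <cstddef>
#include <vector>

// Bump-allocator stand-in, as in the earlier sketch.
class Arena {
 public:
  void* Alloc(size_t bytes) { blocks_.push_back(new char[bytes]); return blocks_.back(); }
  ~Arena() { for (char* b : blocks_) delete[] b; }
 private:
  std::vector<char*> blocks_;
};

// Arena-backed growable array in the spirit of the GrowableArray hits above:
// element storage comes from the arena, and growth simply abandons the old
// block -- the arena reclaims everything at once at the end of compilation.
template <typename T>
class GrowableArraySketch {
 public:
  GrowableArraySketch(Arena* arena, size_t init_capacity)
      : arena_(arena),
        capacity_(init_capacity),
        size_(0),
        elems_(static_cast<T*>(arena->Alloc(sizeof(T) * init_capacity))) {}

  void Insert(const T& value) {
    if (size_ == capacity_) {
      Resize(capacity_ * 2);
    }
    elems_[size_++] = value;
  }

  T Get(size_t index) const { return elems_[index]; }
  size_t Size() const { return size_; }

 private:
  void Resize(size_t new_capacity) {
    // As in growable_array.h: allocate a larger block and copy; no free.
    T* new_elems = static_cast<T*>(arena_->Alloc(sizeof(T) * new_capacity));
    std::copy(elems_, elems_ + size_, new_elems);
    elems_ = new_elems;
    capacity_ = new_capacity;
  }

  Arena* const arena_;
  size_t capacity_;
  size_t size_;
  T* elems_;
};

int main() {
  Arena arena;
  GrowableArraySketch<int> ids(&arena, 2);
  for (int i = 0; i < 5; ++i) ids.Insert(i);  // Forces two growth steps.
  return (ids.Size() == 5 && ids.Get(4) == 4) ? 0 : 1;
}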
/art/compiler/optimizing/ |
H A D | builder.cc | 76 HLocal* local = new (arena_) HLocal(i); 96 new (arena_) HParameterValue(parameter_index++, Primitive::kPrimNot); 99 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter)); 114 new (arena_) HParameterValue(parameter_index++, Primitive::GetType(shorty[pos - 1])); 119 entry_block_->AddInstruction(new (arena_) HStoreLocal(local, parameter)); 143 T* comparison = new (arena_) T(first, second); 145 HInstruction* ifinst = new (arena_) HIf(comparison); 159 T* comparison = new (arena_) T(value, GetIntConstant(0)); 161 HInstruction* ifinst = new (arena_) HIf(comparison); 181 graph_ = new (arena_) HGraph [all...] |
H A D | builder.h | 38 : arena_(arena), 110 ArenaAllocator* const arena_; member in class:art::HGraphBuilder
|
H A D | nodes.cc | 29 ArenaBitVector visiting(arena_, blocks_.Size(), false); 70 ArenaBitVector visited(arena_, blocks_.Size(), false); 87 GrowableArray<size_t> visits(arena_, blocks_.Size()); 96 ArenaBitVector visited(arena_, blocks_.Size(), false); 143 HBasicBlock* new_block = new (arena_) HBasicBlock(this); 145 new_block->AddInstruction(new (arena_) HGoto()); 165 HBasicBlock* new_back_edge = new (arena_) HBasicBlock(this); 167 new_back_edge->AddInstruction(new (arena_) HGoto()); 182 HBasicBlock* pre_header = new (arena_) HBasicBlock(this); 184 pre_header->AddInstruction(new (arena_) HGoto() [all...] |
H A D | nodes.h | 74 : arena_(arena), 83 ArenaAllocator* GetArena() const { return arena_; } 161 ArenaAllocator* const arena_; member in class:art::HGraph
|
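In the optimizing backend every IR object (HGraph, HBasicBlock, HGoto, HLocal, ...) is created with new (arena_), so tearing down a graph never requires walking it; dropping the arena releases all nodes at once. A sketch of that idiom, assuming a hypothetical ArenaObject-style base class that supplies the placement operator new (the real class names and signatures in nodes.h may differ):

#include <cstddef>
#include <vector>

// Bump-allocator stand-in, as in the earlier sketches.
class Arena {
 public:
  void* Alloc(size_t bytes) { blocks_.push_back(new char[bytes]); return blocks_.back(); }
  ~Arena() { for (char* b : blocks_) delete[] b; }
 private:
  std::vector<char*> blocks_;
};

// Hypothetical base class supplying the placement new, so call sites can
// write "new (arena) Goto()" just like "new (arena_) HGoto()" above.
struct ArenaObject {
  static void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }
  static void operator delete(void*, Arena*) {}  // Used only if a constructor throws.
  static void operator delete(void*) {}          // Nodes are never deleted one by one.
};

struct Instruction : ArenaObject {
  virtual ~Instruction() {}
};
struct Goto : Instruction {};

struct Block : ArenaObject {
  // A real basic block keeps its instruction list in arena-backed storage too,
  // since arena teardown frees memory without running destructors; a plain
  // std::vector is used here only to keep the sketch short.
  std::vector<Instruction*> instructions;
  void AddInstruction(Instruction* insn) { instructions.push_back(insn); }
};

int main() {
  Arena arena;
  Block* block = new (&arena) Block();
  block->AddInstruction(new (&arena) Goto());
  // No deletes anywhere: releasing the arena reclaims the whole graph at once.
  return (block->instructions.size() == 1) ? 0 : 1;
}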
/art/compiler/dex/quick/mips/ |
H A D | call_mips.cc | 71 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 76 static_cast<LIR**>(arena_->Alloc(elements * sizeof(LIR*), kArenaAllocLIR)); 148 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 152 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), 227 reinterpret_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), 343 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, spill_count * 4));
|
H A D | target_mips.cc | 433 reg_pool_ = new (arena_) RegisterPool(this, arena_, core_regs, empty_pool /* core64 */, sp_regs,
|
/art/compiler/dex/quick/arm64/ |
H A D | call_arm64.cc | 53 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 57 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR)); 105 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 110 static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR)); 163 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData)); 398 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, frame_size_));
|
H A D | target_arm64.cc | 598 reg_pool_ = new (arena_) RegisterPool(this, arena_, core_regs, core64_regs, sp_regs, dp_regs,
|
/art/compiler/dex/quick/x86/ |
H A D | call_x86.cc | 70 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 74 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), 141 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData)); 280 new(arena_)StackOverflowSlowPath(this, branch,
|
H A D | target_x86.cc | 616 reg_pool_ = new (arena_) RegisterPool(this, arena_, core_regs_64, core_regs_64q, sp_regs_64, 620 reg_pool_ = new (arena_) RegisterPool(this, arena_, core_regs_32, empty_pool, sp_regs_32, 630 RegisterInfo* info = new (arena_) RegisterInfo(reg, GetRegMaskCommon(reg)); 2360 LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
|
/art/compiler/dex/quick/arm/ |
H A D | call_arm.cc | 53 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 57 tab_rec->targets = static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR)); 101 static_cast<SwitchTable*>(arena_->Alloc(sizeof(SwitchTable), kArenaAllocData)); 106 static_cast<LIR**>(arena_->Alloc(size * sizeof(LIR*), kArenaAllocLIR)); 154 static_cast<FillArrayData*>(arena_->Alloc(sizeof(FillArrayData), kArenaAllocData)); 436 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, true, spill_size)); 451 AddSlowPath(new(arena_)StackOverflowSlowPath(this, branch, false, frame_size_));
|
H A D | target_arm.cc | 570 reg_pool_ = new (arena_) RegisterPool(this, arena_, core_regs, empty_pool /* core64 */, sp_regs,
|
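Each target backend (arm, arm64, mips, x86) uses the same recipe for its code-generation records: a SwitchTable or FillArrayData header is carved out of the arena with arena_->Alloc(sizeof(...), kArenaAllocData), its targets array of LIR* with kArenaAllocLIR, and the per-target RegisterPool with placement new. Here is a hedged sketch of the switch-table half of that recipe; the struct layouts and function name are invented stand-ins, not the real Mir2Lir types.

#include <cstddef>
#include <cstdint>
#include <vector>

// Bump-allocator stand-in, as in the earlier sketches.
class Arena {
 public:
  void* Alloc(size_t bytes) { blocks_.push_back(new char[bytes]); return blocks_.back(); }
  ~Arena() { for (char* b : blocks_) delete[] b; }
 private:
  std::vector<char*> blocks_;
};

struct LIR { int opcode; LIR* next; };  // Stand-in for the real LIR node.

// Stand-in for the SwitchTable record filled in by the call_*.cc code above.
struct SwitchTable {
  const uint16_t* table;  // Pointer into the dex payload (not owned).
  int vaddr;              // Dalvik offset of the switch instruction.
  LIR** targets;          // One branch target per case, also arena-allocated.
};

SwitchTable* BuildSwitchTable(Arena* arena, const uint16_t* payload, int vaddr, size_t num_cases) {
  // Header and target array both come from the arena, so they share the
  // lifetime of the rest of the compiled method's data.
  SwitchTable* tab_rec = static_cast<SwitchTable*>(arena->Alloc(sizeof(SwitchTable)));
  tab_rec->table = payload;
  tab_rec->vaddr = vaddr;
  tab_rec->targets = static_cast<LIR**>(arena->Alloc(num_cases * sizeof(LIR*)));
  return tab_rec;
}

int main() {
  Arena arena;
  const uint16_t payload[4] = {0x0100, 2, 0, 0};  // Fake switch payload, illustration only.
  SwitchTable* tab = BuildSwitchTable(&arena, payload, 0x10, 2);
  tab->targets[0] = nullptr;  // Targets get patched in as the cases are emitted.
  return (tab->vaddr == 0x10) ? 0 : 1;
}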
/art/compiler/dex/quick/ |
H A D | gen_common.cc | 81 AddSlowPath(new (arena_) DivZeroCheckSlowPath(this, branch)); 105 AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length)); 135 AddSlowPath(new (arena_) ArrayBoundsCheckSlowPath(this, branch, index, length)); 154 AddSlowPath(new (arena_) NullCheckSlowPath(this, branch)); 580 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont, 665 AddSlowPath(new (arena_) StaticFieldSlowPath(this, unresolved_branch, uninit_branch, cont, 872 AddSlowPath(new (arena_) SlowPath(this, branch, cont, type_idx, rl_method, rl_result)); 935 AddSlowPath(new (arena_) SlowPath(this, fromfast, cont, r_method, string_idx)); 1144 AddSlowPath(new (arena_) InitTypeSlowPath(this, slow_path_branch, slow_path_target, 1291 AddSlowPath(new (arena_) SlowPath [all...] |
H A D | mir_to_lir-inl.h | 46 LIR* insn = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
|
H A D | local_optimizations.cc | 496 static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
|
H A D | ralloc_util.cc | 1215 (arena_->Alloc(num_regs * sizeof(promotion_map_[0]), kArenaAllocRegAlloc)); 1234 static_cast<RefCounts*>(arena_->Alloc(sizeof(RefCounts) * core_reg_count_size, 1237 static_cast<RefCounts *>(arena_->Alloc(sizeof(RefCounts) * fp_reg_count_size,
|
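The shared quick/ code follows the same ownership rule for its control-flow helpers: a slow path is allocated with new (arena_) and handed to AddSlowPath(), LIR nodes come from arena_->Alloc(sizeof(LIR), kArenaAllocLIR), and the register allocator's counting tables from kArenaAllocRegAlloc. Because everything is arena-owned, the code generator can queue slow paths freely and never deletes them. A sketch of that queueing idiom with invented class names (the real LIRSlowPath interface in mir_to_lir.h is richer):

#include <cstddef>
#include <vector>

// Bump-allocator stand-in, as in the earlier sketches.
class Arena {
 public:
  void* Alloc(size_t bytes) { blocks_.push_back(new char[bytes]); return blocks_.back(); }
  ~Arena() { for (char* b : blocks_) delete[] b; }
 private:
  std::vector<char*> blocks_;
};

// Invented stand-in for the LIRSlowPath hierarchy passed to AddSlowPath() above.
class SlowPathSketch {
 public:
  static void* operator new(size_t size, Arena* arena) { return arena->Alloc(size); }
  static void operator delete(void*, Arena*) {}  // Matching form, used only if a ctor throws.
  static void operator delete(void*) {}          // Never deleted individually; the arena owns it.
  virtual ~SlowPathSketch() {}
  virtual void Compile() = 0;  // Emits the out-of-line code for the slow case.
};

class StackOverflowSlowPathSketch : public SlowPathSketch {
 public:
  explicit StackOverflowSlowPathSketch(size_t frame_size) : frame_size_(frame_size) {}
  void Compile() override {
    // Would emit the frame teardown and the throw-stack-overflow call here.
    (void)frame_size_;
  }
 private:
  size_t frame_size_;
};

// Minimal code-generator shell: slow paths are queued during the main pass and
// compiled afterwards; nothing is deleted because the arena owns every object.
class CodeGenSketch {
 public:
  explicit CodeGenSketch(Arena* arena) : arena_(arena) {}
  void AddSlowPath(SlowPathSketch* path) { slow_paths_.push_back(path); }
  void HandleSlowPaths() { for (SlowPathSketch* p : slow_paths_) p->Compile(); }
  Arena* GetArena() const { return arena_; }
 private:
  Arena* const arena_;
  std::vector<SlowPathSketch*> slow_paths_;
};

int main() {
  Arena arena;
  CodeGenSketch cg(&arena);
  cg.AddSlowPath(new (cg.GetArena()) StackOverflowSlowPathSketch(64));
  cg.HandleSlowPaths();
  return 0;
}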