Searched defs:cu (Results 1 - 23 of 23) sorted by relevance

/art/compiler/dex/
pass_driver_me_post_opt.h 31 explicit PassDriverMEPostOpt(const PassManager* const manager, CompilationUnit* cu) argument
32 : PassDriverME(manager, cu) {
pass_driver_me_opts.h 34 CompilationUnit* cu)
35 : PassDriverME(manager, cu), post_opt_pass_manager_(post_opt_pass_manager) {
32 PassDriverMEOpts(const PassManager* const manager, const PassManager* const post_opt_pass_manager, CompilationUnit* cu) argument
global_value_numbering.cc 25 GlobalValueNumbering::GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator, argument
27 : cu_(cu),
28 mir_graph_(cu->mir_graph.get()),
pass_driver_me.h 39 explicit PassDriverME(const PassManager* const pass_manager, CompilationUnit* cu) argument
42 pass_me_data_holder_.c_unit = cu;
global_value_numbering.h 43 static bool Skip(CompilationUnit* cu) { argument
44 return (cu->disable_opt & (1u << kGlobalValueNumbering)) != 0u ||
45 cu->mir_graph->GetMaxNestedLoops() > kMaxAllowedNestedLoops;
54 GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator, Mode mode);
local_value_numbering.cc 1009 CompilationUnit* cu = gvn_->GetCompilationUnit(); local
1010 const char* shorty = cu->shorty;
1012 if ((cu->access_flags & kAccStatic) == 0) {
mir_optimization.cc 1356 static void DisableGVNDependentOptimizations(CompilationUnit* cu) { argument
1357 cu->disable_opt |= (1u << kGvnDeadCodeElimination);
mir_graph.cc 83 MIRGraph::MIRGraph(CompilationUnit* cu, ArenaAllocator* arena) argument
86 cu_(cu),
/art/compiler/driver/
dex_compilation_unit.cc 25 DexCompilationUnit::DexCompilationUnit(CompilationUnit* cu, argument
34 : cu_(cu),
/art/compiler/dex/quick/
quick_compiler.cc 493 CompilationUnit* cu) const {
495 if (cu->mir_graph->GetNumOfCodeAndTempVRs() > kMaxAllowedDalvikRegisters) {
496 VLOG(compiler) << "Too many dalvik registers : " << cu->mir_graph->GetNumOfCodeAndTempVRs();
501 if (kSupportedTypes[cu->instruction_set] == nullptr &&
502 kUnsupportedOpcodesSize[cu->instruction_set] == 0U) {
508 if (!CanCompileShorty(shorty, cu->instruction_set)) {
513 const int *unsupport_list = kUnsupportedOpcodes[cu->instruction_set];
514 int unsupport_list_size = kUnsupportedOpcodesSize[cu->instruction_set];
516 for (unsigned int idx = 0; idx < cu->mir_graph->GetNumBlocks(); idx++) {
517 BasicBlock* bb = cu
720 PassDriverMEOpts pass_driver(GetPreOptPassManager(), GetPostOptPassManager(), &cu); local
800 GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) argument
[all...]
dex_file_method_inliner.cc 902 CompilationUnit* cu = mir_graph->GetCurrentDexCompilationUnit()->GetCompilationUnit(); local
903 if (cu->enable_debug & (1 << kDebugSlowFieldPath)) {
951 CompilationUnit* cu = mir_graph->GetCurrentDexCompilationUnit()->GetCompilationUnit(); local
952 if (cu->enable_debug & (1 << kDebugSlowFieldPath)) {
codegen_util.cc 1030 Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
1037 cu_(cu),
1070 dex_cache_arrays_layout_(cu->compiler_driver->GetDexCacheArraysLayout(cu->dex_file)),
1074 cu->compiler_driver->GetCompilerOptions().GetGenerateDebugInfo(),
gen_common.cc 49 ALWAYS_INLINE static inline bool ForceSlowFieldPath(CompilationUnit* cu) { argument
50 return (cu->enable_debug & (1 << kDebugSlowFieldPath)) != 0;
53 ALWAYS_INLINE static inline bool ForceSlowStringPath(CompilationUnit* cu) { argument
54 return (cu->enable_debug & (1 << kDebugSlowStringPath)) != 0;
57 ALWAYS_INLINE static inline bool ForceSlowTypePath(CompilationUnit* cu) { argument
58 return (cu->enable_debug & (1 << kDebugSlowTypePath)) != 0;
1352 DexCompilationUnit* cu = mir_graph_->GetCurrentDexCompilationUnit(); local
1353 if (!needs_access_check && cu_->compiler_driver->IsSafeCast(cu, insn_idx)) {
1669 // Returns true if it added instructions to 'cu' to divide 'rl_src' by 'lit'
1716 // Returns true if it added instructions to 'cu' t
[all...]
gen_invoke.cc 504 const CompilationUnit* cu, Mir2Lir* cg) {
505 if (cu->instruction_set != kX86 && cu->instruction_set != kX86_64) {
507 InstructionSetPointerSize(cu->instruction_set)).Int32Value();
523 static int NextVCallInsn(CompilationUnit* cu, CallInfo* info, argument
528 Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
544 cu->compiler_driver->GetInstructionSet());
552 if (CommonCallCodeLoadCodePointerIntoInvokeTgt(nullptr, cu, cg)) {
555 DCHECK(cu->instruction_set == kX86 || cu
503 CommonCallCodeLoadCodePointerIntoInvokeTgt(const RegStorage* alt_from, const CompilationUnit* cu, Mir2Lir* cg) argument
569 NextInterfaceCallInsn(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t method_idx, uintptr_t, uintptr_t, InvokeType) argument
610 NextInvokeInsnSP(CompilationUnit* cu, CallInfo* info, QuickEntrypointEnum trampoline, int state, const MethodReference& target_method, uint32_t method_idx) argument
639 NextStaticCallInsnSP(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t, uintptr_t, uintptr_t, InvokeType) argument
647 NextDirectCallInsnSP(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t, uintptr_t, uintptr_t, InvokeType) argument
654 NextSuperCallInsnSP(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t, uintptr_t, uintptr_t, InvokeType) argument
661 NextVCallInsnSP(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t, uintptr_t, uintptr_t, InvokeType) argument
668 NextInterfaceCallInsnWithAccessCheck(CompilationUnit* cu, CallInfo* info, int state, const MethodReference& target_method, uint32_t, uintptr_t, uintptr_t, InvokeType) argument
[all...]
/art/compiler/dex/quick/x86/
call_x86.cc 344 int X86Mir2Lir::X86NextSDCallInsn(CompilationUnit* cu, CallInfo* info, argument
349 X86Mir2Lir* cg = static_cast<X86Mir2Lir*>(cu->cg.get());
380 CHECK_EQ(cu->dex_file, target_method.dex_file);
382 cg->OpPcRelDexCacheArrayLoad(cu->dex_file, offset, cg->TargetReg(kArg0, kRef),
383 cu->target64);
404 CHECK_EQ(cu->dex_file, target_method.dex_file);
405 const size_t pointer_size = GetInstructionSetPointerSize(cu->instruction_set);
target_x86.cc 836 X86Mir2Lir::X86Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
837 : Mir2Lir(cu, mir_graph, arena),
858 Mir2Lir* X86CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph, argument
860 return new X86Mir2Lir(cu, mir_graph, arena);
/art/compiler/dex/quick/arm/
call_arm.cc 614 static bool ArmUseRelativeCall(CompilationUnit* cu, const MethodReference& target_method) { argument
616 return cu->dex_file == target_method.dex_file;
623 int ArmMir2Lir::ArmNextSDCallInsn(CompilationUnit* cu, CallInfo* info, argument
628 ArmMir2Lir* cg = static_cast<ArmMir2Lir*>(cu->cg.get());
652 } else if (ArmUseRelativeCall(cu, target_method)) {
689 } else if (ArmUseRelativeCall(cu, target_method)) {
702 CHECK_EQ(cu->dex_file, target_method.dex_file);
711 cg->OpPcRelDexCacheArrayLoad(cu->dex_file, offset, arg0_ref, false);
target_arm.cc 576 ArmMir2Lir::ArmMir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
577 : Mir2Lir(cu, mir_graph, arena),
591 Mir2Lir* ArmCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph, argument
593 return new ArmMir2Lir(cu, mir_graph, arena);
/art/compiler/dex/quick/arm64/
call_arm64.cc 448 static bool Arm64UseRelativeCall(CompilationUnit* cu, const MethodReference& target_method) { argument
450 return cu->compiler_driver->IsImage() || cu->dex_file == target_method.dex_file;
457 int Arm64Mir2Lir::Arm64NextSDCallInsn(CompilationUnit* cu, CallInfo* info, argument
462 Arm64Mir2Lir* cg = static_cast<Arm64Mir2Lir*>(cu->cg.get());
486 } else if (Arm64UseRelativeCall(cu, target_method)) {
523 } else if (Arm64UseRelativeCall(cu, target_method)) {
536 CHECK_EQ(cu->dex_file, target_method.dex_file);
543 cg->OpPcRelDexCacheArrayLoad(cu->dex_file, offset, arg0_ref, true);
target_arm64.cc 606 Arm64Mir2Lir::Arm64Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
607 : Mir2Lir(cu, mir_graph, arena),
619 Mir2Lir* Arm64CodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph, argument
621 return new Arm64Mir2Lir(cu, mir_graph, arena);
/art/compiler/dex/quick/mips/
call_mips.cc 418 static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, argument
421 Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get());
432 InstructionSetPointerSize(cu->instruction_set)).Int32Value();
443 if (cu->target64) {
452 if (cu->target64) {
479 if (cu->target64) {
492 CHECK_EQ(cu->dex_file, target_method.dex_file);
493 const size_t pointer_size = GetInstructionSetPointerSize(cu->instruction_set);
502 InstructionSetPointerSize(cu->instruction_set)).Int32Value();
target_mips.cc 900 MipsMir2Lir::MipsMir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) argument
901 : Mir2Lir(cu, mir_graph, arena), in_to_reg_storage_mips64_mapper_(this),
903 isaIsR6_(cu_->target64 ? true : cu->compiler_driver->GetInstructionSetFeatures()
905 fpuIs32Bit_(cu_->target64 ? false : cu->compiler_driver->GetInstructionSetFeatures()
915 Mir2Lir* MipsCodeGenerator(CompilationUnit* const cu, MIRGraph* const mir_graph, argument
917 return new MipsMir2Lir(cu, mir_graph, arena);
/art/compiler/optimizing/
builder.cc 542 static bool RequiresConstructorBarrier(const DexCompilationUnit* cu, const CompilerDriver& driver) { argument
544 if (cu == nullptr) {
549 return cu->IsConstructor()
550 && driver.RequiresConstructorBarrier(self, cu->GetDexFile(), cu->GetClassDefIndex());

Completed in 184 milliseconds