Cross-reference matches for the symbol codegen under /art/compiler/optimizing/:
intrinsics_x86.h
  35  explicit IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen);
  60  explicit IntrinsicCodeGeneratorX86(CodeGeneratorX86* codegen) : codegen_(codegen) {}
intrinsics_x86_64.h
  35  explicit IntrinsicLocationsBuilderX86_64(CodeGeneratorX86_64* codegen);
  60  explicit IntrinsicCodeGeneratorX86_64(CodeGeneratorX86_64* codegen) : codegen_(codegen) {}
intrinsics_arm.h
  64  explicit IntrinsicCodeGeneratorARM(CodeGeneratorARM* codegen) : codegen_(codegen) {}
intrinsics_arm64.h
  64  explicit IntrinsicCodeGeneratorARM64(CodeGeneratorARM64* codegen) : codegen_(codegen) {}
intrinsics.h
  81  MoveArguments(HInvoke* invoke, CodeGenerator* codegen, InvokeDexCallingConventionVisitor* calling_convention_visitor)
  82  CodeGenerator* codegen,
  88  DCHECK(codegen->IsBaseline() || !invoke_static_or_direct->IsStaticWithExplicitClinitCheck());
  100  HParallelMove parallel_move(codegen->GetGraph()->GetArena());
  110  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
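The intrinsics.h matches outline how intrinsic arguments are staged for an out-of-line call: a parallel move is built from each argument's current location to its calling-convention slot and then emitted through the codegen's move resolver. A minimal sketch of that flow, assuming ART's internal headers are available and that helpers such as GetNextLocation and AddMove behave as their names suggest:

    #include "code_generator.h"   // CodeGenerator, InvokeDexCallingConventionVisitor (assumed path)
    #include "nodes.h"            // HInvoke, HParallelMove (assumed path)

    static void MoveArguments(HInvoke* invoke,
                              CodeGenerator* codegen,
                              InvokeDexCallingConventionVisitor* visitor) {
      if (invoke->GetNumberOfArguments() == 0u) {
        return;  // Nothing to move.
      }
      LocationSummary* locations = invoke->GetLocations();
      // Argument locations may overlap their calling-convention slots, so all
      // moves are gathered into one HParallelMove and resolved together.
      HParallelMove parallel_move(codegen->GetGraph()->GetArena());
      for (size_t i = 0; i < invoke->GetNumberOfArguments(); ++i) {
        HInstruction* input = invoke->InputAt(i);
        Location cc_loc = visitor->GetNextLocation(input->GetType());
        parallel_move.AddMove(locations->InAt(i), cc_loc, input->GetType(), nullptr);
      }
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    }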
code_generator_arm.h
  101  ParallelMoveResolverARM(ArenaAllocator* allocator, CodeGeneratorARM* codegen)
  102  : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}
  136  LocationsBuilderARM(HGraph* graph, CodeGeneratorARM* codegen)
  137  : HGraphVisitor(graph), codegen_(codegen) {}
  161  InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen);
code_generator_arm64.h
  142  InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen);
  177  explicit LocationsBuilderARM64(HGraph* graph, CodeGeneratorARM64* codegen)
  178  : HGraphVisitor(graph), codegen_(codegen) {}
  200  ParallelMoveResolverARM64(ArenaAllocator* allocator, CodeGeneratorARM64* codegen)
  201  : ParallelMoveResolverNoSwap(allocator), codegen_(codegen), vixl_temps_() {}
code_generator_mips64.h
  110  ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
  111  : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}
  144  LocationsBuilderMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
  145  : HGraphVisitor(graph), codegen_(codegen) {}
  170  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen);
code_generator_x86.h
  95  ParallelMoveResolverX86(ArenaAllocator* allocator, CodeGeneratorX86* codegen)
  96  : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}
  119  LocationsBuilderX86(HGraph* graph, CodeGeneratorX86* codegen)
  120  : HGraphVisitor(graph), codegen_(codegen) {}
  144  InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen);
code_generator_x86_64.h
  104  ParallelMoveResolverX86_64(ArenaAllocator* allocator, CodeGeneratorX86_64* codegen)
  105  : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}
  129  LocationsBuilderX86_64(HGraph* graph, CodeGeneratorX86_64* codegen)
  130  : HGraphVisitor(graph), codegen_(codegen) {}
  154  InstructionCodeGeneratorX86_64(HGraph* graph, CodeGeneratorX86_64* codegen);
codegen_test.cc
  53  // Provide our own codegen, that ensures the C calling conventions
  124  Run(const InternalCodeAllocator& allocator, const CodeGenerator& codegen, bool has_result, Expected expected)
  125  const CodeGenerator& codegen,
  131  if (codegen.GetInstructionSet() == kThumb2) {
  190  static void RunCodeOptimized(CodeGenerator* codegen,
  199  SsaLivenessAnalysis liveness(graph, codegen);
  202  RegisterAllocator register_allocator(graph->GetArena(), codegen, liveness);
  207  codegen->CompileOptimized(&allocator);
  208  Run(allocator, *codegen, has_result, expected);
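The codegen_test.cc matches trace the whole optimized path a test exercises: liveness analysis, register allocation, code emission, then execution of the produced buffer. Roughly, as a sketch (InternalCodeAllocator and Run are test-harness helpers shown above; the Analyze/AllocateRegisters calls are assumed from their definitions elsewhere):

    // Inside a test, once the HGraph and the target CodeGenerator exist:
    SsaLivenessAnalysis liveness(graph, codegen);
    liveness.Analyze();                                        // compute live intervals

    RegisterAllocator register_allocator(graph->GetArena(), codegen, liveness);
    register_allocator.AllocateRegisters();                    // map virtual to physical registers

    InternalCodeAllocator allocator;
    codegen->CompileOptimized(&allocator);                     // emit native code into the buffer
    Run(allocator, *codegen, has_result, expected);            // execute it and compare the result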
graph_visualizer.cc
  34  HGraphVisualizerPrinter(HGraph* graph, std::ostream& output, const char* pass_name, bool is_after_pass, const CodeGenerator& codegen)
  38  const CodeGenerator& codegen)
  43  codegen_(codegen),
  349  HGraphVisualizer(std::ostream* output, HGraph* graph, const CodeGenerator& codegen)
  351  const CodeGenerator& codegen)
  352  : output_(output), graph_(graph), codegen_(codegen) {}
optimizing_compiler.cc
  93  PassInfoPrinter(HGraph* graph, const char* method_name, const CodeGenerator& codegen, std::ostream* visualizer_output, CompilerDriver* compiler_driver)
  95  const CodeGenerator& codegen,
  102  visualizer_(visualizer_output, graph, codegen) {
  224  CodeGenerator* codegen,
  231  CompiledMethod* CompileBaseline(CodeGenerator* codegen,
  362  // The codegen has a few assumptions that only the instruction simplifier can
  382  AllocateRegisters(HGraph* graph, CodeGenerator* codegen, PassInfoPrinter* pass_info_printer)
  383  CodeGenerator* codegen,
  386  SsaLivenessAnalysis liveness(graph, codegen);
  393  RegisterAllocator(graph->GetArena(), codegen, liveness).AllocateRegisters();
  397  CompileOptimized(HGraph* graph, CodeGenerator* codegen, CompilerDriver* compiler_driver, const DexFile& dex_file, const DexCompilationUnit& dex_compilation_unit, PassInfoPrinter* pass_info_printer) const
  398  CodeGenerator* codegen,
  407  AllocateRegisters(graph, codegen, pass_info_printe...
  440  CompileBaseline(CodeGenerator* codegen, CompilerDriver* compiler_driver, const DexCompilationUnit& dex_compilation_unit) const
  ... (additional matches not shown)
code_generator.cc
  915  void SlowPathCode::RecordPcInfo(CodeGenerator* codegen, HInstruction* instruction, uint32_t dex_pc) {
  916  codegen->RecordPcInfo(instruction, dex_pc, this);
  919  void SlowPathCode::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  921  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  922  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
  923  if (!codegen->IsCoreCalleeSaveRegister(i)) {
  929  DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
  932  stack_offset += codegen->SaveCoreRegister(stack_offset, i);
  937  for (size_t i = 0, e = codegen...
  949  RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations)
  ... (additional matches not shown)
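SaveLiveRegisters walks every core register the backend reports, skips callee-saves, and spills the rest into the slow-path area of the frame; a second, analogous loop (truncated in the match at line 937) handles floating-point registers. A sketch of the core-register loop, with the live-register query (GetLiveRegisters/ContainsCoreRegister) assumed rather than quoted:

    void SlowPathCode::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
      RegisterSet* live = locations->GetLiveRegisters();       // assumed accessor
      size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
      for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
        if (!codegen->IsCoreCalleeSaveRegister(i) && live->ContainsCoreRegister(i)) {
          // Slow-path spill slots live below the frame's own spill area.
          DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
          stack_offset += codegen->SaveCoreRegister(stack_offset, i);
        }
      }
      // ...followed by the same loop over GetNumberOfFloatingPointRegisters().
    }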
intrinsics_arm.cc
  41  #define __ codegen->GetAssembler()->
  43  static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  80  static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  82  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
  96  CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
  99  SaveLiveRegisters(codegen, invoke_->GetLocations());
  101  MoveArguments(invoke_, codegen);
  104  codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
  105  RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
  116  MoveFromReturnRegister(out, invoke_->GetType(), codegen);
  481  GenUnsafeGet(HInvoke* invoke, Primitive::Type type, bool is_volatile, CodeGeneratorARM* codegen)
  614  GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool is_volatile, bool is_ordered, CodeGeneratorARM* codegen)
  710  GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen)
  853  GenerateVisitStringIndexOf(HInvoke* invoke, ArmAssembler* assembler, CodeGeneratorARM* codegen, ArenaAllocator* allocator, bool start_at_zero)
  ... (additional matches not shown)
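Lines 96-116 belong to the ARM intrinsic slow path: when the inline expansion bails out, it spills live registers, re-marshals the arguments, performs the regular static/direct call, records the PC for stack maps, and copies the result back. A reconstructed sketch of that shape (class and label plumbing are assumptions; __ expands to the codegen's assembler per the #define at line 41):

    void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
      CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
      __ Bind(GetEntryLabel());

      SaveLiveRegisters(codegen, invoke_->GetLocations());   // spill caller-save registers
      MoveArguments(invoke_, codegen);                        // stage args per the calling convention

      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());    // stack map for the runtime call

      Location out = invoke_->GetLocations()->Out();
      if (out.IsValid()) {
        MoveFromReturnRegister(out, invoke_->GetType(), codegen);
      }

      RestoreLiveRegisters(codegen, invoke_->GetLocations());
      __ b(GetExitLabel());                                   // resume the fast path
    }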
intrinsics_arm64.cc
  66  #define __ codegen->GetAssembler()->vixl_masm_->
  68  MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM64* codegen)
  70  CodeGeneratorARM64* codegen) {
  89  static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  91  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
  105  CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
  108  SaveLiveRegisters(codegen, invoke_->GetLocations());
  110  MoveArguments(invoke_, codegen);
  113  codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
  114  RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
  125  MoveFromReturnRegister(out, invoke_->GetType(), codegen);
  658  GenUnsafeGet(HInvoke* invoke, Primitive::Type type, bool is_volatile, CodeGeneratorARM64* codegen)
  771  GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool is_volatile, bool is_ordered, CodeGeneratorARM64* codegen)
  845  GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen)
  996  GenerateVisitStringIndexOf(HInvoke* invoke, vixl::MacroAssembler* masm, CodeGeneratorARM64* codegen, ArenaAllocator* allocator, bool start_at_zero)
  ... (additional matches not shown)
register_allocator.cc
  40  RegisterAllocator(ArenaAllocator* allocator, CodeGenerator* codegen, const SsaLivenessAnalysis& liveness)
  41  CodeGenerator* codegen,
  44  codegen_(codegen),
  52  physical_core_register_intervals_(allocator, codegen->GetNumberOfCoreRegisters()),
  53  physical_fp_register_intervals_(allocator, codegen->GetNumberOfFloatingPointRegisters()),
  63  blocked_core_registers_(codegen->GetBlockedCoreRegisters()),
  64  blocked_fp_registers_(codegen->GetBlockedFloatingPointRegisters()),
  69  codegen->SetupBlockedRegisters(kIsBaseline);
  70  physical_core_register_intervals_.SetSize(codegen->GetNumberOfCoreRegisters());
  71  physical_fp_register_intervals_.SetSize(codegen->GetNumberOfFloatingPointRegisters());
  75  reserved_out_slots_ = InstructionSetPointerSize(codegen...
  464  ValidateIntervals(const GrowableArray<LiveInterval*>& intervals, size_t number_of_spill_slots, size_t number_of_out_slots, const CodeGenerator& codegen, ArenaAllocator* allocator, bool processing_core_registers, bool log_fatal_on_failure)
  ... (additional matches not shown)
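The constructor matches show why the register allocator stays backend-agnostic: every table it owns is sized and seeded from the codegen. A sketch assembled from the snippets above (member names and initializer order come from the matches; any gaps are filled with assumptions rather than quoted source):

    RegisterAllocator::RegisterAllocator(ArenaAllocator* allocator,
                                         CodeGenerator* codegen,
                                         const SsaLivenessAnalysis& liveness)
        : allocator_(allocator),
          codegen_(codegen),
          liveness_(liveness),
          physical_core_register_intervals_(allocator, codegen->GetNumberOfCoreRegisters()),
          physical_fp_register_intervals_(allocator, codegen->GetNumberOfFloatingPointRegisters()),
          blocked_core_registers_(codegen->GetBlockedCoreRegisters()),
          blocked_fp_registers_(codegen->GetBlockedFloatingPointRegisters()) {
      // Ask the backend which registers it reserves (stack pointer, thread register, ...).
      codegen->SetupBlockedRegisters(kIsBaseline);
      physical_core_register_intervals_.SetSize(codegen->GetNumberOfCoreRegisters());
      physical_fp_register_intervals_.SetSize(codegen->GetNumberOfFloatingPointRegisters());
    }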
intrinsics_x86.cc
  40  IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
  41  : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
  59  #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
  62  MoveFromReturnRegister(Location target, Primitive::Type type, CodeGeneratorX86* codegen)
  64  CodeGeneratorX86* codegen) {
  116  static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  118  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
  133  CodeGeneratorX86* codegen = down_cast<CodeGeneratorX86*>(codegen_in);
  136  SaveLiveRegisters(codegen, invoke_->GetLocations());
  138  MoveArguments(invoke_, codegen);
  731  InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke)
  746  CreateSSE41FPToFPLocations(ArenaAllocator* arena, HInvoke* invoke, CodeGeneratorX86* codegen)
  765  GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke, X86Assembler* assembler, int round_mode)
  969  GenerateStringIndexOf(HInvoke* invoke, X86Assembler* assembler, CodeGeneratorX86* codegen, ArenaAllocator* allocator, bool start_at_zero)
  1474  GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool is_volatile, CodeGeneratorX86* codegen)
  1583  GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen)
  ... (additional matches not shown)
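CreateSSE41FPToFPLocations illustrates the usual pattern for an intrinsic that only exists on newer hardware: locations are only marked as intrinsified when the codegen reports SSE4.1, otherwise the invoke keeps its normal call path. A sketch under that assumption (the feature-check spelling and the LocationSummary calls are assumptions, not quoted source):

    static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                           HInvoke* invoke,
                                           CodeGeneratorX86* codegen) {
      // Without SSE4.1 we simply do not intrinsify; the invoke falls back to
      // the out-of-line call path (InvokeOutOfLineIntrinsic above).
      if (!codegen->GetInstructionSetFeatures().HasSSE4_1()) {   // assumed feature query
        return;
      }
      LocationSummary* locations =
          new (arena) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
      locations->SetInAt(0, Location::RequiresFpuRegister());    // FP input
      locations->SetOut(Location::RequiresFpuRegister());        // rounded FP result
    }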
intrinsics_x86_64.cc
  36  IntrinsicLocationsBuilderX86_64::IntrinsicLocationsBuilderX86_64(CodeGeneratorX86_64* codegen)
  37  : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
  55  #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->
  58  MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorX86_64* codegen)
  60  CodeGeneratorX86_64* codegen) {
  108  static void MoveArguments(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
  110  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
  124  CodeGeneratorX86_64* codegen = down_cast<CodeGeneratorX86_64*>(codegen_in);
  127  SaveLiveRegisters(codegen, invoke_->GetLocations());
  129  MoveArguments(invoke_, codegen);
  288  MathAbsFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen)
  388  GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen)
  608  InvokeOutOfLineIntrinsic(CodeGeneratorX86_64* codegen, HInvoke* invoke)
  623  CreateSSE41FPToFPLocations(ArenaAllocator* arena, HInvoke* invoke, CodeGeneratorX86_64* codegen)
  642  GenSSE41FPToFPIntrinsic(CodeGeneratorX86_64* codegen, HInvoke* invoke, X86_64Assembler* assembler, int round_mode)
  680  CreateSSE41FPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, CodeGeneratorX86_64* codegen)
  894  GenerateStringIndexOf(HInvoke* invoke, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, ArenaAllocator* allocator, bool start_at_zero)
  1365  GenUnsafePut(LocationSummary* locations, Primitive::Type type, bool is_volatile, CodeGeneratorX86_64* codegen)
  1450  GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86_64* codegen)
  ... (additional matches not shown)
ssa_liveness_analysis.h
  1095  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen)
  1097  codegen_(codegen),
code_generator_arm.cc
  56  #define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
  63  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  64  CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
  79  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  80  CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
  96  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  97  CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
  99  SaveLiveRegisters(codegen, instruction_->GetLocations());
  102  RestoreLiveRegisters(codegen, instruction_->GetLocations());
  139  void EmitNativeCode(CodeGenerator* codegen) OVERRID...
  491  InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
  1261  TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM* codegen)
  ... (additional matches not shown)
code_generator_arm64.cc
  104  #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
  117  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  118  CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
  123  codegen->EmitParallelMoves(
  143  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  144  CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
  166  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  168  CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
  171  SaveLiveRegisters(codegen, locations);
  192  RestoreLiveRegisters(codegen, location...
  1067  InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen)
  1997  TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen)
  ... (additional matches not shown)
code_generator_mips64.cc
  106  #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->
  118  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  119  CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
  124  codegen->EmitParallelMoves(index_location_,
  149  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  150  CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
  174  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  176  CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
  179  SaveLiveRegisters(codegen, locations);
  200  RestoreLiveRegisters(codegen, location...
  1003  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
  ... (additional matches not shown)
code_generator_x86.cc
  46  #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
  52  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  55  RecordPcInfo(codegen, instruction_, instruction_->GetDexPc());
  67  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  70  RecordPcInfo(codegen, instruction_, instruction_->GetDexPc());
  82  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  107  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  108  CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
  121  RecordPcInfo(codegen, instruction_, instruction_->GetDexPc());
  137  void EmitNativeCode(CodeGenerator* codegen) OVERRID...
  464  InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
  1212  TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen)
  ... (additional matches not shown)
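TryGenerateIntrinsicCode, which recurs in every backend above, is the hook the invoke visitors use to prefer an inline intrinsic over a normal call; only when it declines does the regular call sequence get emitted. A plausible shape, reconstructed rather than quoted:

    static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
      if (invoke->GetLocations()->Intrinsified()) {
        IntrinsicCodeGeneratorX86 intrinsic(codegen);
        intrinsic.Dispatch(invoke);                 // emits the inline expansion
        return true;
      }
      return false;                                 // caller falls back to a real call
    }

    // Typical call site in the invoke visitor (illustrative only):
    //   if (TryGenerateIntrinsicCode(invoke, codegen_)) return;
    //   codegen_->GenerateStaticOrDirectCall(invoke, ...);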
code_generator_x86_64.cc
  49  #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->
  55  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  59  RecordPcInfo(codegen, instruction_, instruction_->GetDexPc());
  71  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  75  RecordPcInfo(codegen, instruction_, instruction_->GetDexPc());
  88  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  120  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
  121  CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
  123  SaveLiveRegisters(codegen, instruction_->GetLocations());
  125  RecordPcInfo(codegen, instruction...
  452  InstructionCodeGeneratorX86_64(HGraph* graph, CodeGeneratorX86_64* codegen)
  1316  TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen)
  4482  RIPFixup(const CodeGeneratorX86_64& codegen, int offset)
  ... (additional matches not shown)