Searched refs:assembler in /art/compiler (Results 1 - 21 of 21) sorted by relevance

/art/compiler/optimizing/
intrinsics_utils.h
24 #include "utils/assembler.h"
51 Assembler* assembler = codegen->GetAssembler(); variable
52 assembler->Bind(GetEntryLabel());
74 assembler->Jump(GetExitLabel());
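
The intrinsics_utils.h hits show the shared slow-path shape used by the intrinsic code generators: the slow path binds its entry label, does the slow work, then jumps to the exit label to rejoin the fast path. A minimal self-contained sketch of that control flow follows; the Assembler, Label, and class names are stand-ins for illustration, not the ART types.

    #include <cstdio>

    // Stand-in label/assembler types; the real ones live in utils/assembler.h.
    struct Label { int id; };
    struct Assembler {
      void Bind(Label* label) { std::printf("bind L%d\n", label->id); }
      void Jump(Label* label) { std::printf("b    L%d\n", label->id); }
    };

    // Shape of an intrinsic slow path: enter at entry_, leave through exit_.
    class IntrinsicSlowPathSketch {
     public:
      void EmitNativeCode(Assembler* assembler) {
        assembler->Bind(GetEntryLabel());  // fast path branches here on the slow case
        // ... call into the runtime / generic implementation ...
        assembler->Jump(GetExitLabel());   // resume right after the intrinsic
      }
      Label* GetEntryLabel() { return &entry_; }
      Label* GetExitLabel() { return &exit_; }
     private:
      Label entry_{1};
      Label exit_{2};
    };
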
intrinsics_arm.h
37 ArmAssembler* assembler,
39 : arena_(arena), assembler_(assembler), features_(features) {}
36 IntrinsicLocationsBuilderARM(ArenaAllocator* arena, ArmAssembler* assembler, const ArmInstructionSetFeatures& features) argument
intrinsics_mips64.cc
139 #define __ assembler->
149 static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { argument
186 static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { argument
225 Mips64Assembler* assembler) {
277 Mips64Assembler* assembler) {
308 Mips64Assembler* assembler) {
345 Mips64Assembler* assembler) {
388 static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { argument
425 static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) { argument
461 Mips64Assembler* assembler) {
223 GenReverseBytes(LocationSummary* locations, Primitive::Type type, Mips64Assembler* assembler) argument
275 GenNumberOfLeadingZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) argument
306 GenNumberOfTrailingZeroes(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) argument
343 GenReverse(LocationSummary* locations, Primitive::Type type, Mips64Assembler* assembler) argument
458 GenMinMaxFP(LocationSummary* locations, bool is_min, Primitive::Type type, Mips64Assembler* assembler) argument
572 GenMinMax(LocationSummary* locations, bool is_min, Mips64Assembler* assembler) argument
682 Mips64Assembler* assembler = GetAssembler(); local
706 Mips64Assembler* assembler = GetAssembler(); local
730 GenRoundingMode(LocationSummary* locations, FloatRoundingMode mode, Mips64Assembler* assembler) argument
799 Mips64Assembler* assembler = GetAssembler(); local
812 Mips64Assembler* assembler = GetAssembler(); local
825 Mips64Assembler* assembler = GetAssembler(); local
838 Mips64Assembler* assembler = GetAssembler(); local
859 Mips64Assembler* assembler = GetAssembler(); local
872 Mips64Assembler* assembler = GetAssembler(); local
885 Mips64Assembler* assembler = GetAssembler(); local
898 Mips64Assembler* assembler = GetAssembler(); local
914 Mips64Assembler* assembler = GetAssembler(); local
941 Mips64Assembler* assembler = codegen->GetAssembler(); local
1043 Mips64Assembler* assembler = codegen->GetAssembler(); local
1209 Mips64Assembler* assembler = codegen->GetAssembler(); local
1300 Mips64Assembler* assembler = GetAssembler(); local
1347 Mips64Assembler* assembler = GetAssembler(); local
1383 Mips64Assembler* assembler = GetAssembler(); local
1469 GenerateStringIndexOf(HInvoke* invoke, Mips64Assembler* assembler, CodeGeneratorMIPS64* codegen, ArenaAllocator* allocator, bool start_at_zero) argument
1577 Mips64Assembler* assembler = GetAssembler(); local
1611 Mips64Assembler* assembler = GetAssembler(); local
1642 Mips64Assembler* assembler = GetAssembler(); local
1662 GenIsInfinite(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) argument
[all...]
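
Every per-architecture intrinsics file in these results opens with "#define __ assembler->", so helpers such as MoveFPToInt read as a stream of instruction emissions. A rough, self-contained illustration of that convention follows; the assembler type and instruction methods are stand-ins that only print, not the real Mips64Assembler API.

    #include <cstdio>

    // Toy assembler with two emitters named after the MIPS moves the real
    // helper would issue (stand-ins; they only print here).
    struct ToyMips64Assembler {
      void Dmfc1(int gpr, int fpr) { std::printf("dmfc1 $%d, $f%d\n", gpr, fpr); }
      void Mfc1(int gpr, int fpr)  { std::printf("mfc1  $%d, $f%d\n", gpr, fpr); }
    };

    // The convention from the intrinsics files: '__' expands to 'assembler->'.
    #define __ assembler->

    static void MoveFPToIntSketch(bool is64bit, int out_gpr, int in_fpr,
                                  ToyMips64Assembler* assembler) {
      if (is64bit) {
        __ Dmfc1(out_gpr, in_fpr);  // move a 64-bit FP bit pattern to a core register
      } else {
        __ Mfc1(out_gpr, in_fpr);   // 32-bit variant
      }
    }

    #undef __
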
intrinsics_arm.cc
66 #define __ assembler->
84 static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { argument
96 static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { argument
154 ArmAssembler* assembler) {
196 ArmAssembler* assembler) {
243 static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) { argument
283 ArmAssembler* assembler) {
331 ArmAssembler* assembler) {
374 ArmAssembler* assembler = GetAssembler(); local
384 ArmAssembler* assembler local
152 GenNumberOfLeadingZeros(LocationSummary* locations, Primitive::Type type, ArmAssembler* assembler) argument
194 GenNumberOfTrailingZeros(LocationSummary* locations, Primitive::Type type, ArmAssembler* assembler) argument
281 GenAbsInteger(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) argument
329 GenMinMax(LocationSummary* locations, bool is_min, ArmAssembler* assembler) argument
395 ArmAssembler* assembler = GetAssembler(); local
406 ArmAssembler* assembler = GetAssembler(); local
427 ArmAssembler* assembler = GetAssembler(); local
446 ArmAssembler* assembler = GetAssembler(); local
456 ArmAssembler* assembler = GetAssembler(); local
466 ArmAssembler* assembler = GetAssembler(); local
480 ArmAssembler* assembler = GetAssembler(); local
493 ArmAssembler* assembler = GetAssembler(); local
505 ArmAssembler* assembler = codegen->GetAssembler(); local
692 ArmAssembler* assembler = codegen->GetAssembler(); local
835 ArmAssembler* assembler = codegen->GetAssembler(); local
951 ArmAssembler* assembler = GetAssembler(); local
999 ArmAssembler* assembler = GetAssembler(); local
1034 ArmAssembler* assembler = GetAssembler(); local
1112 GenerateVisitStringIndexOf(HInvoke* invoke, ArmAssembler* assembler, CodeGeneratorARM* codegen, ArenaAllocator* allocator, bool start_at_zero) argument
1216 ArmAssembler* assembler = GetAssembler(); local
1245 ArmAssembler* assembler = GetAssembler(); local
1270 ArmAssembler* assembler = GetAssembler(); local
1309 CheckPosition(ArmAssembler* assembler, Location pos, Register input, Location length, SlowPathCode* slow_path, Register input_len, Register temp, bool length_is_input_length = false) argument
1377 ArmAssembler* assembler = GetAssembler(); local
1659 GenFPToFPCall(HInvoke* invoke, ArmAssembler* assembler, CodeGeneratorARM* codegen, QuickEntrypointEnum entry) argument
1683 GenFPFPToFPCall(HInvoke* invoke, ArmAssembler* assembler, CodeGeneratorARM* codegen, QuickEntrypointEnum entry) argument
1853 ArmAssembler* assembler = GetAssembler(); local
1871 ArmAssembler* assembler = GetAssembler(); local
1888 ArmAssembler* assembler = GetAssembler(); local
1906 ArmAssembler* assembler = GetAssembler(); local
1923 ArmAssembler* assembler = GetAssembler(); local
1949 ArmAssembler* assembler = GetAssembler(); local
[all...]
intrinsics_x86_64.cc
80 #define __ assembler->
98 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { argument
104 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { argument
148 X86_64Assembler* assembler) {
198 // TODO: Enable memory operations when the assembler supports them.
209 X86_64Assembler* assembler,
253 static void GenAbsInteger(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) { argument
294 X86_64Assembler* assembler,
423 X86_64Assembler* assembler) {
552 X86_64Assembler* assembler,
146 GenReverseBytes(LocationSummary* locations, Primitive::Type size, X86_64Assembler* assembler) argument
207 MathAbsFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen) argument
291 GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen) argument
422 GenMinMax(LocationSummary* locations, bool is_min, bool is_long, X86_64Assembler* assembler) argument
550 GenSSE41FPToFPIntrinsic(CodeGeneratorX86_64* codegen, HInvoke* invoke, X86_64Assembler* assembler, int round_mode) argument
631 X86_64Assembler* assembler = GetAssembler(); local
681 X86_64Assembler* assembler = GetAssembler(); local
734 X86_64Assembler* assembler = codegen->GetAssembler(); local
925 X86_64Assembler* assembler = GetAssembler(); local
976 CheckPosition(X86_64Assembler* assembler, Location pos, CpuRegister input, Location length, SlowPathCode* slow_path, CpuRegister input_len, CpuRegister temp, bool length_is_input_length = false) argument
1043 X86_64Assembler* assembler = GetAssembler(); local
1133 X86_64Assembler* assembler = GetAssembler(); local
1370 X86_64Assembler* assembler = GetAssembler(); local
1403 X86_64Assembler* assembler = GetAssembler(); local
1500 GenerateStringIndexOf(HInvoke* invoke, X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, ArenaAllocator* allocator, bool start_at_zero) argument
1640 X86_64Assembler* assembler = GetAssembler(); local
1668 X86_64Assembler* assembler = GetAssembler(); local
1692 X86_64Assembler* assembler = GetAssembler(); local
1726 X86_64Assembler* assembler = GetAssembler(); local
1776 GenPeek(LocationSummary* locations, Primitive::Type size, X86_64Assembler* assembler) argument
1840 GenPoke(LocationSummary* locations, Primitive::Type size, X86_64Assembler* assembler) argument
1935 X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler()); local
2088 X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler()); local
2190 X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler()); local
2308 SwapBits(CpuRegister reg, CpuRegister temp, int32_t shift, int32_t mask, X86_64Assembler* assembler) argument
2321 X86_64Assembler* assembler = GetAssembler(); local
2352 SwapBits64(CpuRegister reg, CpuRegister temp, CpuRegister temp_mask, int32_t shift, int64_t mask, X86_64Assembler* assembler) argument
2365 X86_64Assembler* assembler = GetAssembler(); local
2401 GenBitCount(X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, HInvoke* invoke, bool is_long) argument
2460 GenOneBit(X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, HInvoke* invoke, bool is_high, bool is_long) argument
2589 GenLeadingZeros(X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, HInvoke* invoke, bool is_long) argument
2663 GenTrailingZeros(X86_64Assembler* assembler, CodeGeneratorX86_64* codegen, HInvoke* invoke, bool is_long) argument
[all...]
intrinsics_x86.cc
86 #define __ assembler->
111 static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { argument
126 static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) { argument
196 X86Assembler* assembler) {
235 X86Assembler* assembler = GetAssembler(); local
256 // TODO: Enable memory operations when the assembler supports them.
275 X86Assembler* assembler,
333 static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) { argument
361 static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) { argument
406 X86Assembler* assembler,
194 GenReverseBytes(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) argument
273 MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler, CodeGeneratorX86* codegen) argument
403 GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double, X86Assembler* assembler, CodeGeneratorX86* codegen) argument
568 GenMinMax(LocationSummary* locations, bool is_min, bool is_long, X86Assembler* assembler) argument
730 GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke, X86Assembler* assembler, int round_mode) argument
811 X86Assembler* assembler = GetAssembler(); local
858 X86Assembler* assembler = codegen->GetAssembler(); local
1063 X86Assembler* assembler = GetAssembler(); local
1125 CheckPosition(X86Assembler* assembler, Location pos, Register input, Register length, SlowPathCode* slow_path, Register input_len, Register temp) argument
1171 X86Assembler* assembler = GetAssembler(); local
1264 X86Assembler* assembler = GetAssembler(); local
1296 X86Assembler* assembler = GetAssembler(); local
1398 GenerateStringIndexOf(HInvoke* invoke, X86Assembler* assembler, CodeGeneratorX86* codegen, ArenaAllocator* allocator, bool start_at_zero) argument
1539 X86Assembler* assembler = GetAssembler(); local
1566 X86Assembler* assembler = GetAssembler(); local
1589 X86Assembler* assembler = GetAssembler(); local
1623 X86Assembler* assembler = GetAssembler(); local
1681 GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) argument
1752 GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) argument
1846 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); local
2041 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); local
2165 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); local
2292 SwapBits(Register reg, Register temp, int32_t shift, int32_t mask, X86Assembler* assembler) argument
2305 X86Assembler* assembler = GetAssembler(); local
2336 X86Assembler* assembler = GetAssembler(); local
2380 GenBitCount(X86Assembler* assembler, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) argument
2448 GenLeadingZeros(X86Assembler* assembler, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) argument
2553 GenTrailingZeros(X86Assembler* assembler, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) argument
[all...]
intrinsics_mips.cc
150 #define __ assembler->
160 static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { argument
202 static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { argument
251 MipsAssembler* assembler) {
445 MipsAssembler* assembler) {
493 MipsAssembler* assembler) {
616 MipsAssembler* assembler) {
748 static void MathAbsFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { argument
777 static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) { argument
827 MipsAssembler* assembler) {
246 GenReverse(LocationSummary* locations, Primitive::Type type, bool isR2OrNewer, bool isR6, bool reverseBits, MipsAssembler* assembler) argument
442 GenNumberOfLeadingZeroes(LocationSummary* locations, bool is64bit, bool isR6, MipsAssembler* assembler) argument
490 GenNumberOfTrailingZeroes(LocationSummary* locations, bool is64bit, bool isR6, MipsAssembler* assembler) argument
613 GenBitCount(LocationSummary* locations, Primitive::Type type, bool isR6, MipsAssembler* assembler) argument
823 GenMinMaxFP(LocationSummary* locations, bool is_min, Primitive::Type type, bool is_R6, MipsAssembler* assembler) argument
1056 GenMinMax(LocationSummary* locations, bool is_min, Primitive::Type type, bool is_R6, MipsAssembler* assembler) argument
1284 MipsAssembler* assembler = GetAssembler(); local
1297 MipsAssembler* assembler = GetAssembler(); local
1310 MipsAssembler* assembler = GetAssembler(); local
1342 MipsAssembler* assembler = GetAssembler(); local
1360 MipsAssembler* assembler = GetAssembler(); local
1390 MipsAssembler* assembler = GetAssembler(); local
1403 MipsAssembler* assembler = GetAssembler(); local
1425 MipsAssembler* assembler = GetAssembler(); local
1443 MipsAssembler* assembler = GetAssembler(); local
1468 MipsAssembler* assembler = GetAssembler(); local
1501 MipsAssembler* assembler = codegen->GetAssembler(); local
1611 MipsAssembler* assembler = codegen->GetAssembler(); local
1796 MipsAssembler* assembler = codegen->GetAssembler(); local
1889 MipsAssembler* assembler = GetAssembler(); local
1936 MipsAssembler* assembler = GetAssembler(); local
1973 MipsAssembler* assembler = GetAssembler(); local
2059 GenerateStringIndexOf(HInvoke* invoke, bool start_at_zero, MipsAssembler* assembler, CodeGeneratorMIPS* codegen, ArenaAllocator* allocator) argument
2184 MipsAssembler* assembler = GetAssembler(); local
2216 MipsAssembler* assembler = GetAssembler(); local
2246 MipsAssembler* assembler = GetAssembler(); local
2264 GenIsInfinite(LocationSummary* locations, const Primitive::Type type, const bool isR6, MipsAssembler* assembler) argument
2322 GenHighestOneBit(LocationSummary* locations, const Primitive::Type type, bool isR6, MipsAssembler* assembler) argument
2387 GenLowestOneBit(LocationSummary* locations, const Primitive::Type type, bool isR6, MipsAssembler* assembler) argument
[all...]
code_generator_x86_64.cc
30 #include "utils/assembler.h"
6682 X86_64Assembler* assembler = codegen_->GetAssembler(); local
6685 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6689 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6701 assembler->AppendInt32(offset_to_block);
6711 X86_64Assembler* assembler = GetAssembler(); local
6712 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6714 assembler->Align(4, 0);
6715 constant_area_start_ = assembler->CodeSize();
6723 assembler
[all...]
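
The code_generator_x86_64.cc hits outline how the constant area is finalized after the method body: align the buffer, record where the constant area starts (constant_area_start_ = CodeSize()), then append each jump-table entry as a 32-bit offset relative to that start. A simplified stand-alone model of that bookkeeping follows; the byte-buffer "assembler" and function names are assumptions, not the ART API.

    #include <cstdint>
    #include <vector>

    // Minimal byte buffer modelling CodeSize/Align/AppendInt32.
    class BufferSketch {
     public:
      int32_t CodeSize() const { return static_cast<int32_t>(bytes_.size()); }
      void Align(int alignment, uint8_t fill) {
        while (bytes_.size() % alignment != 0u) bytes_.push_back(fill);
      }
      void AppendInt32(int32_t v) {
        uint32_t uv = static_cast<uint32_t>(v);
        for (int i = 0; i < 4; ++i) bytes_.push_back(static_cast<uint8_t>(uv >> (8 * i)));
      }
     private:
      std::vector<uint8_t> bytes_;
    };

    // After the method body is emitted: align, remember where the constant
    // area starts, then append each jump-table entry as an offset the
    // generated code can add to the table address at run time.
    void EmitConstantAreaSketch(BufferSketch* assembler,
                                const std::vector<int32_t>& block_offsets) {
      assembler->Align(4, 0);
      const int32_t constant_area_start = assembler->CodeSize();
      for (int32_t block_offset : block_offsets) {
        const int32_t offset_to_block = block_offset - constant_area_start;
        assembler->AppendInt32(offset_to_block);
      }
    }
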
code_generator_x86.cc
30 #include "utils/assembler.h"
7258 X86Assembler* assembler = codegen_->GetAssembler(); local
7261 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7278 assembler->AppendInt32(offset_to_block);
7288 X86Assembler* assembler = GetAssembler(); local
7289 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7292 assembler->Align(4, 0);
7293 constant_area_start_ = assembler->CodeSize();
7301 assembler->AddConstantArea();
code_generator_arm.cc
32 #include "utils/assembler.h"
3991 ArmAssembler* assembler = codegen_->GetAssembler(); local
3992 if (assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, opcode, value, &so)) {
4006 return assembler->ShifterOperandCanHold(kNoRegister, kNoRegister, neg_opcode, ~value, &so);
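
The code_generator_arm.cc hits test whether an immediate can be encoded as a shifter operand before choosing an instruction form, falling back to the complementary opcode with ~value when it cannot. A compact model of that decision is sketched below; the encoding check implements only the classic A32 8-bit-rotated immediate rule, while the real ShifterOperandCanHold also covers Thumb2 encodings and register operands.

    #include <cstdint>

    // Toy check for the classic ARM data-processing immediate: an 8-bit value
    // rotated right by an even amount (A32 rule only; Thumb2 differs).
    static bool CanEncodeAsShifterOperandSketch(uint32_t value) {
      for (uint32_t rot = 0; rot < 32; rot += 2) {
        // Undo a rotate-right by 'rot': rotate left and see if 8 bits remain.
        uint32_t rotated_left =
            (rot == 0) ? value : ((value << rot) | (value >> (32u - rot)));
        if (rotated_left <= 0xffu) {
          return true;
        }
      }
      return false;
    }

    // Mirrors the fallback in the hits: if 'value' does not fit, try the
    // complementary instruction (e.g. AND vs BIC) with the complemented value.
    static bool CanHoldDirectlyOrComplemented(uint32_t value) {
      return CanEncodeAsShifterOperandSketch(value) ||
             CanEncodeAsShifterOperandSketch(~value);
    }
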
/art/compiler/linker/arm/
relative_patcher_thumb2.cc
84 arm::Thumb2Assembler assembler(&arena);
85 assembler.LoadFromOffset(
88 assembler.bkpt(0);
89 assembler.FinalizeCode();
90 std::vector<uint8_t> thunk_code(assembler.CodeSize());
92 assembler.FinalizeInstructions(code);
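
relative_patcher_thumb2.cc shows the usual finish-and-copy sequence for a generated thunk: emit the instructions, FinalizeCode(), size a byte vector from CodeSize(), and FinalizeInstructions() into that buffer. A generic sketch of the same sequence follows; the assembler here is a stand-in that simply accumulates bytes, and its FinalizeInstructions takes a raw pointer rather than ART's MemoryRegion.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Stand-in assembler: buffers encodings; "finalization" is a no-op here,
    // whereas real assemblers resolve fixups and labels at that point.
    class ThunkAssemblerSketch {
     public:
      void Emit32(uint32_t insn) {
        uint8_t enc[4];
        std::memcpy(enc, &insn, sizeof(enc));
        buffer_.insert(buffer_.end(), enc, enc + sizeof(enc));
      }
      void FinalizeCode() {}
      size_t CodeSize() const { return buffer_.size(); }
      void FinalizeInstructions(uint8_t* out, size_t size) const {
        std::memcpy(out, buffer_.data(), size);
      }
     private:
      std::vector<uint8_t> buffer_;
    };

    // Same shape as the thunk compilation in the hits above.
    std::vector<uint8_t> CompileThunkSketch() {
      ThunkAssemblerSketch assembler;
      assembler.Emit32(0xdeadbeefu);  // placeholder for the "load from offset"
      assembler.Emit32(0xbe00be00u);  // placeholder for the trailing "bkpt"
      assembler.FinalizeCode();
      std::vector<uint8_t> thunk_code(assembler.CodeSize());
      assembler.FinalizeInstructions(thunk_code.data(), thunk_code.size());
      return thunk_code;
    }
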
/art/compiler/trampolines/
trampoline_compiler.cc
46 #define __ assembler.
54 Thumb2Assembler assembler(arena);
84 Arm64Assembler assembler(arena);
123 MipsAssembler assembler(arena);
155 Mips64Assembler assembler(arena);
187 X86Assembler assembler(arena);
208 x86_64::X86_64Assembler assembler(arena);
/art/compiler/utils/x86_64/
assembler_x86_64_test.cc
363 std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { argument
370 assembler->shll(*reg, shifter);
386 std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { argument
393 assembler->shlq(*reg, shifter);
409 std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { argument
416 assembler->shrl(*reg, shifter);
432 std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { argument
439 assembler->shrq(*reg, shifter);
455 std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) { argument
462 assembler
478 sarq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
501 rorl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
524 roll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
547 rorq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
570 rolq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
1170 x87_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
1233 ret_and_leave_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
1448 setcc_test_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) argument
1496 buildframe_test_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
1541 removeframe_test_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
1570 increaseframe_test_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
1589 decreaseframe_test_fn(AssemblerX86_64Test::Base* assembler_test ATTRIBUTE_UNUSED, x86_64::X86_64Assembler* assembler) argument
[all...]
/art/compiler/utils/
assembler_thumb_test.cc
175 #define __ assembler->
177 void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname, argument
188 void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) { argument
193 EmitAndCheck(assembler, testname, results->second);
200 Thumb2AssemblerTest() : pool(), arena(&pool), assembler(&arena) { }
204 arm::Thumb2Assembler assembler; member in class:art::arm::Thumb2AssemblerTest
207 #define __ assembler.
217 EmitAndCheck(&assembler, "SimpleMov");
226 EmitAndCheck(&assembler, "SimpleMov32");
234 EmitAndCheck(&assembler, "SimpleMovAd
[all...]
assembler_test.h
20 #include "assembler.h"
52 typedef std::string (*TestFn)(AssemblerTest* assembler_test, Ass* assembler);
58 // This driver assumes the assembler has already been called.
490 // Get the name of the assembler, e.g., "as" by default.
495 // Switches to the assembler command. Default none.
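
assembler_test.h declares the TestFn hook these per-ISA tests implement: a function that drives the assembler under test and returns the assembly text the harness expects, which it then checks against a reference assembler. A shrunken illustration of that contract follows; the types, the single shll method, and the direct string comparison are assumptions made for the sketch.

    #include <sstream>
    #include <string>

    struct ToyAssemblerTestBase {};  // stand-in for AssemblerTest::Base

    // Stand-in assembler under test: records a textual trace of what it emits.
    struct ToyAssembler {
      std::ostringstream emitted;
      void shll(int reg, int imm) {
        emitted << "shll %r" << reg << ", " << imm << "\n";
      }
    };

    // Same shape as TestFn: drive the assembler, return the expected listing.
    using TestFn = std::string (*)(ToyAssemblerTestBase* assembler_test,
                                   ToyAssembler* assembler);

    static std::string shll_fn(ToyAssemblerTestBase* /*assembler_test*/,
                               ToyAssembler* assembler) {
      std::ostringstream expected;
      for (int reg = 0; reg < 4; ++reg) {
        assembler->shll(reg, 1);
        expected << "shll %r" << reg << ", 1\n";
      }
      return expected.str();
    }

    // The real harness assembles both sides and diffs the binaries; here the
    // check is just a string comparison.
    static bool RunTestSketch(TestFn fn) {
      ToyAssemblerTestBase base;
      ToyAssembler assembler;
      std::string expected = fn(&base, &assembler);
      return expected == assembler.emitted.str();
    }
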
/art/compiler/linker/arm64/
relative_patcher_arm64.cc
252 arm64::Arm64Assembler assembler(&arena);
255 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
257 assembler.FinalizeCode();
258 std::vector<uint8_t> thunk_code(assembler.CodeSize());
260 assembler.FinalizeInstructions(code);
/art/compiler/utils/x86/
assembler_x86_test.cc
280 std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { argument
287 assembler->rorl(*reg, shifter);
303 std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) { argument
310 assembler->roll(*reg, shifter);
/art/compiler/utils/arm/
assembler_arm.cc
578 static void EmitLoad(ArmAssembler* assembler, ManagedRegister m_dst,
585 assembler->LoadFromOffset(kLoadWord, dst.AsCoreRegister(), src_register, src_offset);
588 assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairLow(), src_register, src_offset);
589 assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairHigh(), src_register, src_offset + 4);
591 assembler->LoadSFromOffset(dst.AsSRegister(), src_register, src_offset);
594 assembler->LoadDFromOffset(dst.AsDRegister(), src_register, src_offset);
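
assembler_arm.cc's EmitLoad dispatches on the kind of destination register: one word load for a core register, two word loads 4 bytes apart for a register pair, and the FP load variants for S and D registers. A condensed model of that dispatch follows; the enum and assembler calls are stand-ins for the ART ManagedRegister/ArmAssembler API.

    #include <cstdint>
    #include <cstdio>

    enum class RegKind { kCore, kCorePair, kSRegister, kDRegister };  // stand-in

    struct LoadSketchAssembler {
      void LoadWord(int reg, int base, int32_t off) { std::printf("ldr  r%d, [r%d, #%d]\n", reg, base, off); }
      void LoadS(int sreg, int base, int32_t off)   { std::printf("vldr s%d, [r%d, #%d]\n", sreg, base, off); }
      void LoadD(int dreg, int base, int32_t off)   { std::printf("vldr d%d, [r%d, #%d]\n", dreg, base, off); }
    };

    // Same branching as EmitLoad: register pairs take two word loads.
    void EmitLoadSketch(LoadSketchAssembler* assembler, RegKind kind, int dst,
                        int base, int32_t offset) {
      switch (kind) {
        case RegKind::kCore:
          assembler->LoadWord(dst, base, offset);
          break;
        case RegKind::kCorePair:
          assembler->LoadWord(dst, base, offset);          // low word
          assembler->LoadWord(dst + 1, base, offset + 4);  // high word
          break;
        case RegKind::kSRegister:
          assembler->LoadS(dst, base, offset);
          break;
        case RegKind::kDRegister:
          assembler->LoadD(dst, base, offset);
          break;
      }
    }
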
assembler_thumb2.h
368 // Force the assembler to generate 32 bit instructions.
575 // Prepare the assembler->fixup_dependents_ and each Fixup's dependents_start_/count_.
576 static void PrepareDependents(Thumb2Assembler* assembler);
578 ArrayRef<const FixupId> Dependents(const Thumb2Assembler& assembler) const {
579 return ArrayRef<const FixupId>(assembler.fixup_dependents_).SubArray(dependents_start_,
608 // Emit the branch instruction into the assembler buffer. This does the
642 static void ForExpandableDependencies(Thumb2Assembler* assembler, Function fn);
654 uint32_t location_; // Offset into assembler buffer in bytes.
655 uint32_t target_; // Offset into assembler buffer in bytes.
658 // array in the assembler an
[all...]
assembler_thumb2.cc
31 void Thumb2Assembler::Fixup::ForExpandableDependencies(Thumb2Assembler* assembler, Function fn) { argument
35 Fixup* fixups = assembler->fixups_.data();
36 for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {
54 void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) { argument
63 Fixup* fixups = assembler->fixups_.data();
65 assembler,
72 for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {
80 assembler->fixup_dependents_.resize(number_of_dependents);
81 FixupId* dependents = assembler->fixup_dependents_.data();
83 assembler,
[all...]
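
The Thumb2 fixup code above builds a flat dependents array in two passes: ForExpandableDependencies is first used to count how many dependents each fixup has, then the same walk fills fixup_dependents_ and each fixup's start index. A simplified, self-contained version of that counting-then-filling idiom follows; the data layout is an assumption, and the real Fixup also tracks branch-expansion state.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // For each fixup we know which other fixup it depends on (or -1 for none).
    // Goal: for every fixup, a contiguous slice of the fixups that depend on it.
    struct DependentsIndexSketch {
      std::vector<uint32_t> dependents;  // flat array, like fixup_dependents_
      std::vector<uint32_t> start;       // dependents_start_ per fixup
      std::vector<uint32_t> count;       // dependents_count_ per fixup
    };

    DependentsIndexSketch PrepareDependentsSketch(const std::vector<int>& depends_on) {
      const size_t n = depends_on.size();
      DependentsIndexSketch index;
      // Pass 1: count dependents per target fixup.
      index.count.assign(n, 0u);
      for (size_t id = 0; id != n; ++id) {
        if (depends_on[id] >= 0) ++index.count[depends_on[id]];
      }
      // Compute each fixup's slice start from the running totals.
      index.start.assign(n, 0u);
      uint32_t total = 0u;
      for (size_t id = 0; id != n; ++id) {
        index.start[id] = total;
        total += index.count[id];
      }
      // Pass 2: fill the flat array, using 'cursor' as the insertion point.
      index.dependents.resize(total);
      std::vector<uint32_t> cursor = index.start;
      for (size_t id = 0; id != n; ++id) {
        if (depends_on[id] >= 0) {
          index.dependents[cursor[depends_on[id]]++] = static_cast<uint32_t>(id);
        }
      }
      return index;
    }
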
/art/compiler/
Android.mk
79 utils/assembler.cc \
