Searched refs:lir (Results 1 - 24 of 24) sorted by relevance

/art/compiler/dex/quick/mips/
assemble_mips.cc:592 void MipsMir2Lir::ConvertShortToLongBranch(LIR* lir) { argument
595 int opcode = lir->opcode;
596 int dalvik_offset = lir->dalvik_offset;
619 LIR* hop_branch = RawLIR(dalvik_offset, opcode, lir->operands[0],
620 lir->operands[1], 0, 0, 0, hop_target);
621 InsertLIRBefore(lir, hop_branch);
624 InsertLIRBefore(lir, curr_pc);
627 lir->target);
628 InsertLIRBefore(lir, delta_hi);
629 InsertLIRBefore(lir, ancho
650 LIR *lir; local
855 GetInsnSize(LIR* lir) argument
863 LIR* lir; local
[all...]
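
The assemble_mips.cc hits above are the branch-relaxation path: when a conditional branch cannot reach its target with a short encoding, ConvertShortToLongBranch inserts a hop branch before it and builds a longer PC-relative sequence in its place via RawLIR/InsertLIRBefore. A schematic, non-ART sketch of the idea (the range constant and register names are illustrative only):

  #include <cstdlib>

  // A MIPS short branch carries a signed 16-bit word offset, i.e. roughly
  // +/-128 KiB of reach; anything farther needs the long form.
  constexpr int kShortBranchRangeBytes = 1 << 17;

  bool NeedsLongBranch(int branch_pc, int target_pc) {
    return std::abs(target_pc - branch_pc) >= kShortBranchRangeBytes;
  }

  // Conceptually the rewrite turns
  //     beq  $a0, $a1, far_target      // target out of short range
  // into
  //     bne  $a0, $a1, hop             // condition inverted so the short branch skips the long jump
  //     <PC-relative sequence that reaches far_target>
  //   hop:
  // which is what the RawLIR/InsertLIRBefore calls in the snippet assemble.
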
codegen_mips.h:127 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
128 void SetupTargetResourceMasks(LIR* lir, uint64_t flags, ResourceMask* use_mask,
132 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
135 size_t GetInsnSize(LIR* lir) OVERRIDE;
136 bool IsUnconditionalBranch(LIR* lir);
273 void ConvertShortToLongBranch(LIR* lir);
target_mips.cc:312 void MipsMir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, ResourceMask* use_mask, argument
314 DCHECK(!lir->flags.use_def_invalid);
368 std::string MipsMir2Lir::BuildInsnString(const char *fmt, LIR *lir, unsigned char* base_addr) { argument
385 operand = lir->operands[nc-'0'];
419 reinterpret_cast<uintptr_t>(base_addr) + lir->offset + 4 + (operand << 1),
420 lir->target);
426 int offset_1 = lir->operands[0];
427 int offset_2 = NEXT_LIR(lir)->operands[0];
429 (((reinterpret_cast<uintptr_t>(base_addr) + lir->offset + 4) & ~3) +
884 bool MipsMir2Lir::IsUnconditionalBranch(LIR* lir) { argument
[all...]
/art/compiler/dex/quick/arm/
assemble_arm.cc:1085 uint8_t* ArmMir2Lir::EncodeLIRs(uint8_t* write_pos, LIR* lir) { argument
1087 for (; lir != nullptr; lir = NEXT_LIR(lir)) {
1088 lir->offset = (write_pos - write_buffer);
1089 if (!lir->flags.is_nop) {
1090 int opcode = lir->opcode;
1094 if (lir->offset & 0x2) {
1100 } else if (LIKELY(!lir->flags.is_nop)) {
1101 const ArmEncodingMap *encoder = &EncodingMap[lir
1241 LIR* lir; local
1636 GetInsnSize(LIR* lir) argument
[all...]
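
In assemble_arm.cc, EncodeLIRs walks the instruction chain via NEXT_LIR, stamps each node with its byte offset into the output buffer, and emits nothing for nodes flagged as nops. A minimal standalone sketch of that traversal, using a simplified Insn node in place of the real LIR (field names here are assumptions, not ART's layout):

  #include <cstddef>
  #include <cstdint>

  // Simplified stand-in for the LIR node: only the fields the loop touches.
  struct Insn {
    Insn* next = nullptr;
    size_t offset = 0;    // byte offset in the output buffer, filled in below
    bool is_nop = false;  // nodes marked as nops are skipped at emit time
    size_t size = 4;      // encoded size in bytes (fixed width assumed here)
  };

  // Walk the chain, assign offsets relative to the buffer start, and advance
  // the write cursor only for instructions that will actually be emitted.
  uint8_t* EncodeAll(uint8_t* write_buffer, uint8_t* write_pos, Insn* insn) {
    for (; insn != nullptr; insn = insn->next) {
      insn->offset = static_cast<size_t>(write_pos - write_buffer);
      if (!insn->is_nop) {
        // The real code encodes the opcode and operands here; the sketch
        // only advances the cursor by the instruction's size.
        write_pos += insn->size;
      }
    }
    return write_pos;
  }
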
target_arm.cc:163 void ArmMir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, argument
166 DCHECK(!lir->flags.use_def_invalid);
168 int opcode = lir->opcode;
183 def_mask->SetBits(EncodeArmRegList(lir->operands[0]));
187 def_mask->SetBits(EncodeArmRegList(lir->operands[1]));
191 def_mask->SetBits(EncodeArmRegList(lir->operands[0]));
195 for (int i = 0; i < lir->operands[2]; i++) {
196 SetupRegMask(def_mask, lir->operands[1] + i);
210 use_mask->SetBits(EncodeArmRegList(lir->operands[0]));
214 use_mask->SetBits(EncodeArmRegList(lir
354 BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) argument
561 IsUnconditionalBranch(LIR* lir) argument
[all...]
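
The target_arm.cc matches build the def/use resource masks: for block transfers the operand is itself a register bitmask handed to EncodeArmRegList, and the vldm/vstm-style case expands a base register plus count one register at a time. A simplified sketch of both expansions over a plain uint64_t (SetRegBit and the reg-to-bit mapping are assumptions, not ART's ResourceMask API):

  #include <cstdint>

  // Mark a single register as defined in a simple bitmask model.
  inline void SetRegBit(uint64_t* mask, int reg) {
    *mask |= (UINT64_C(1) << reg);
  }

  // Case 1: the operand already is a bitmask of core registers (ldm/stm),
  // so it can be ORed into the def mask wholesale.
  inline void AddRegList(uint64_t* mask, uint32_t reg_list) {
    *mask |= reg_list;
  }

  // Case 2: a contiguous range "first_reg, count" (vldm/vstm style) is
  // expanded register by register, as the loop in the snippet does.
  inline void AddRegRange(uint64_t* mask, int first_reg, int count) {
    for (int i = 0; i < count; ++i) {
      SetRegBit(mask, first_reg + i);
    }
  }
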
codegen_arm.h:127 static uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
128 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
129 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
133 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
136 size_t GetInsnSize(LIR* lir) OVERRIDE;
137 bool IsUnconditionalBranch(LIR* lir);
273 size_t GetInstructionOffset(LIR* lir);
utility_arm.cc:892 LIR* lir = nullptr; local
894 lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp);
896 lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(),
902 return lir;
1257 size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) { argument
1258 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
1260 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0;
int_arm.cc:1091 LIR* lir = NewLIR2(kThumb2LdrPcRel12, reg.GetReg(), 0); local
1092 lir->target = target;
/art/compiler/dex/quick/x86/
assemble_x86.cc:709 size_t X86Mir2Lir::GetInsnSize(LIR* lir) { argument
710 DCHECK(!IsPseudoLirOp(lir->opcode));
711 const X86EncodingMap* entry = &X86Mir2Lir::EncodingMap[lir->opcode];
712 DCHECK_EQ(entry->opcode, lir->opcode) << entry->name;
718 return lir->operands[0]; // Length of nop is sole operand.
721 case kRegOpcode: // lir operands - 0: reg
722 return ComputeSize(entry, NO_REG, NO_REG, lir->operands[0], 0);
723 case kReg: // lir operands - 0: reg
724 return ComputeSize(entry, NO_REG, NO_REG, lir->operands[0], 0);
725 case kMem: // lir operand
1618 EmitUnimplemented(const X86EncodingMap* entry, LIR* lir) argument
1634 LIR *lir; local
1952 LIR* lir; local
[all...]
quick_assemble_x86_test.cc:141 LIR lir; local
142 memset(&lir, 0, sizeof(LIR));
143 lir.opcode = opcode;
144 lir.operands[0] = op0;
145 lir.operands[1] = op1;
146 lir.operands[2] = op2;
147 lir.operands[3] = op3;
148 lir.operands[4] = op4;
149 lir.flags.size = m2l->GetInsnSize(&lir);
[all...]
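
quick_assemble_x86_test.cc builds a LIR record by hand: zero the struct, fill in the opcode and up to five operands, then ask the backend for the encoded size and cache it in flags.size. A sketch of the same construction pattern with a simplified stand-in struct (FakeLIR and FakeInsnSize are illustrative; the real test uses LIR and m2l->GetInsnSize):

  #include <cstddef>
  #include <cstring>

  struct FakeLIR {
    int opcode;
    int operands[5];
    struct Flags { size_t size; } flags;
  };

  // Assumed size callback standing in for the backend's GetInsnSize.
  size_t FakeInsnSize(const FakeLIR& insn) {
    return insn.opcode == 0 ? 0 : 4;  // placeholder sizing rule
  }

  FakeLIR MakeInsn(int opcode, int op0, int op1, int op2, int op3, int op4) {
    FakeLIR lir;
    std::memset(&lir, 0, sizeof(FakeLIR));  // start from a fully zeroed record
    lir.opcode = opcode;
    lir.operands[0] = op0;
    lir.operands[1] = op1;
    lir.operands[2] = op2;
    lir.operands[3] = op3;
    lir.operands[4] = op4;
    lir.flags.size = FakeInsnSize(lir);     // cache the encoded size up front
    return lir;
  }
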
codegen_x86.h:160 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
161 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
165 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) OVERRIDE;
168 size_t GetInsnSize(LIR* lir) OVERRIDE;
169 bool IsUnconditionalBranch(LIR* lir) OVERRIDE;
496 void EmitUnimplemented(const X86EncodingMap* entry, LIR* lir);
fp_x86.cc:653 LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement, 0x7fffffff); local
654 AnnotateDalvikRegAccess(lir, displacement >> 2, false /*is_load */, false /* is_64bit */);
655 AnnotateDalvikRegAccess(lir, displacement >> 2, true /* is_load */, false /* is_64bit*/);
717 LIR *lir = NewLIR3(kX86And32MI, rs_rX86_SP_32.GetReg(), displacement + HIWORD_OFFSET, 0x7fffffff); local
718 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, true /* is_load */, true /* is_64bit*/);
719 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2, false /*is_load */, true /* is_64bit */);
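
Both fp_x86.cc hits AND a stack word with 0x7fffffff, which clears the sign bit of the IEEE-754 value stored there; masking the high word of a double (or the single word of a float) is the standard bitwise absolute-value trick. A small host-side illustration of the same operation, not the emitted x86:

  #include <cassert>
  #include <cstdint>
  #include <cstring>

  // Clear the sign bit of a double by masking its bit pattern, mirroring the
  // AND-with-0x7fffffff the backend applies to the high stack word.
  double BitwiseFabs(double v) {
    uint64_t bits;
    std::memcpy(&bits, &v, sizeof(bits));
    bits &= ~(UINT64_C(1) << 63);  // same effect as AND 0x7fffffff on the high 32-bit word
    std::memcpy(&v, &bits, sizeof(bits));
    return v;
  }

  int main() {
    assert(BitwiseFabs(-2.5) == 2.5);
    assert(BitwiseFabs(3.0) == 3.0);
    return 0;
  }
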
int_x86.cc:102 LIR* branch = NewLIR2(kX86Jcc8, 0 /* lir operand for Jcc offset */ ,
121 LIR* branch = NewLIR2(kX86Jcc8, 0 /* lir operand for Jcc offset */ , cc);
1980 LIR *lir = NewLIR3(x86op, cu_->target64 ? rl_dest.reg.GetReg() : rl_dest.reg.GetLowReg(), local
1982 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2,
1986 lir = NewLIR3(x86op, rl_dest.reg.GetHighReg(), r_base, displacement + HIWORD_OFFSET);
1987 AnnotateDalvikRegAccess(lir, (displacement + HIWORD_OFFSET) >> 2,
2023 LIR *lir = NewLIR3(x86op, r_base, displacement + LOWORD_OFFSET, local
2025 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2,
2027 AnnotateDalvikRegAccess(lir, (displacement + LOWORD_OFFSET) >> 2,
2031 lir
2874 LIR *lir = NewLIR3(x86op, r_base, displacement + LOWORD_OFFSET, val); local
2906 LIR *lir = NewLIR3(x86op, r_base, displacement + LOWORD_OFFSET, val_lo); local
2914 LIR *lir = NewLIR3(x86op, r_base, displacement + HIWORD_OFFSET, val_hi); local
[all...]
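
The int_x86.cc matches perform 64-bit arithmetic on memory operands as two 32-bit operations, one at displacement + LOWORD_OFFSET and one at displacement + HIWORD_OFFSET, with val_lo/val_hi holding the split immediate. A minimal illustration of that lo/hi split (the 0 and 4 offsets reflect the little-endian layout this implies):

  #include <cassert>
  #include <cstdint>

  constexpr int LOWORD_OFFSET = 0;  // low 32 bits of the 64-bit slot
  constexpr int HIWORD_OFFSET = 4;  // high 32 bits of the 64-bit slot

  // The two halves that become val_lo / val_hi in the emitted
  // memory-immediate instructions.
  struct Split64 {
    int32_t lo;
    int32_t hi;
  };

  Split64 SplitImmediate(int64_t value) {
    return {static_cast<int32_t>(value),
            static_cast<int32_t>(value >> 32)};
  }

  int main() {
    Split64 s = SplitImmediate(INT64_C(0x1122334455667788));
    assert(static_cast<uint32_t>(s.lo) == 0x55667788u);
    assert(static_cast<uint32_t>(s.hi) == 0x11223344u);
    return 0;
  }
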
target_x86.cc:277 void X86Mir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, argument
280 DCHECK(!lir->flags.use_def_invalid);
315 if (lir->opcode == kX86RepneScasw) {
357 std::string X86Mir2Lir::BuildInsnString(const char *fmt, LIR *lir, unsigned char* base_addr) { argument
376 int operand = lir->operands[operand_number];
387 static_cast<uint32_t>(lir->operands[operand_number+1]));
408 reinterpret_cast<uintptr_t>(base_addr) + lir->offset + operand,
409 lir->target);
810 bool X86Mir2Lir::IsUnconditionalBranch(LIR* lir) { argument
811 return (lir
[all...]
/art/compiler/dex/quick/
mir_to_lir-inl.h:165 inline void Mir2Lir::SetupResourceMasks(LIR* lir) { argument
166 int opcode = lir->opcode;
169 lir->u.m.use_mask = lir->u.m.def_mask = &kEncodeNone;
171 lir->flags.fixup = kFixupLabel;
180 lir->flags.fixup = kFixupLabel;
184 lir->flags.size = GetInsnSize(lir);
185 estimated_native_code_size_ += lir->flags.size;
211 lir
[all...]
codegen_util.cc:114 void Mir2Lir::UnlinkLIR(LIR* lir) { argument
115 if (UNLIKELY(lir == first_lir_insn_)) {
116 first_lir_insn_ = lir->next;
117 if (lir->next != nullptr) {
118 lir->next->prev = nullptr;
120 DCHECK(lir->next == nullptr);
121 DCHECK(lir == last_lir_insn_);
124 } else if (lir == last_lir_insn_) {
125 last_lir_insn_ = lir->prev;
126 lir
134 NopLIR(LIR* lir) argument
141 SetMemRefType(LIR* lir, bool is_load, int mem_type) argument
181 AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit) argument
199 DumpLIRInsn(LIR* lir, unsigned char* base_addr) argument
618 AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) argument
626 AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset, unsigned int element_size) argument
1203 AppendLIR(LIR* lir) argument
[all...]
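
UnlinkLIR in codegen_util.cc is a plain doubly-linked-list removal with special cases when the node is the head (first_lir_insn_) or tail (last_lir_insn_) of the instruction stream. A condensed standalone sketch of that logic; Node, head, and tail are simplified stand-ins, and the tail reset in the single-element case is inferred from the DCHECK visible in the snippet:

  struct Node {
    Node* prev = nullptr;
    Node* next = nullptr;
  };

  // Detach 'n' from the chain delimited by head/tail, patching the
  // neighbours and the end pointers. As in the visible branches, the
  // detached node's own links are not cleared here.
  void Unlink(Node*& head, Node*& tail, Node* n) {
    if (n == head) {
      head = n->next;
      if (n->next != nullptr) {
        n->next->prev = nullptr;
      } else {
        tail = nullptr;  // single-element list: n was both head and tail
      }
    } else if (n == tail) {
      tail = n->prev;
      tail->next = nullptr;
    } else {
      n->prev->next = n->next;
      n->next->prev = n->prev;
    }
  }
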
local_optimizations.cc:93 inline void Mir2Lir::EliminateLoad(LIR* lir, int reg_id) { argument
94 DCHECK(RegStorage::SameRegType(lir->operands[0], reg_id));
98 if (lir->operands[0] == reg_id) {
99 NopLIR(lir);
106 dest_reg = RegStorage::Solo32(lir->operands[0]);
110 dest_reg = RegStorage::Solo64(lir->operands[0]);
114 dest_reg = RegStorage::FloatSolo32(lir->operands[0]);
118 dest_reg = RegStorage::FloatSolo64(lir->operands[0]);
125 ConvertMemOpIntoMove(lir, dest_reg, src_reg);
126 NopLIR(lir);
[all...]
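
EliminateLoad in local_optimizations.cc shows the shape of the redundant-load elimination: if the load's destination already holds the value, the load is simply turned into a nop; otherwise the memory access is rewritten as a register-to-register move (ConvertMemOpIntoMove) and the original load is nopped. A schematic version of that decision (Insn and EmitMove are illustrative, not ART APIs):

  #include <cstdio>

  struct Insn {
    int dest_reg;
    bool is_nop = false;
  };

  // Hypothetical helper: emit a reg-to-reg move in place of the memory access.
  void EmitMove(int dest_reg, int src_reg) {
    std::printf("mov r%d, r%d\n", dest_reg, src_reg);
  }

  // reg_with_value: register already known to contain the loaded value.
  void EliminateLoad(Insn* load, int reg_with_value) {
    if (load->dest_reg == reg_with_value) {
      load->is_nop = true;                       // value already where it is needed
    } else {
      EmitMove(load->dest_reg, reg_with_value);  // reuse the cached value
      load->is_nop = true;                       // original load no longer executes
    }
  }
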
mir_to_lir.h:190 #define NEXT_LIR(lir) (lir->next)
191 #define PREV_LIR(lir) (lir->prev)
555 virtual size_t GetInstructionOffset(LIR* lir);
616 void AppendLIR(LIR* lir);
643 void SetupResourceMasks(LIR* lir);
644 void SetMemRefType(LIR* lir, bool is_load, int mem_type);
645 void AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit);
649 void EliminateLoad(LIR* lir, in
[all...]
mir_to_lir.cc:1414 size_t Mir2Lir::GetInstructionOffset(LIR* lir) { argument
1415 UNUSED(lir);
/art/compiler/dex/quick/arm64/
assemble_arm64.cc:688 uint8_t* Arm64Mir2Lir::EncodeLIRs(uint8_t* write_pos, LIR* lir) { argument
690 for (; lir != nullptr; lir = NEXT_LIR(lir)) {
691 lir->offset = (write_pos - write_buffer);
692 bool opcode_is_wide = IS_WIDE(lir->opcode);
693 A64Opcode opcode = UNWIDE(lir->opcode);
699 if (LIKELY(!lir->flags.is_nop)) {
708 uint32_t operand = lir->operands[i];
785 << " @ 0x" << std::hex << lir
860 GetPrevEmittingLIR(LIR* lir) argument
872 LIR* lir; local
1101 GetInsnSize(LIR* lir) argument
[all...]
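
The arm64 assembler lines show how one encoding table serves both 32-bit and 64-bit instruction variants: the wide flavour is marked by a flag folded into the opcode value, and IS_WIDE/UNWIDE test and strip it before the table lookup. A toy version of that pattern (the flag value is arbitrary here, not ART's actual wide bit):

  #include <cassert>

  // Fold a "64-bit variant" marker into a spare opcode bit (illustrative value).
  constexpr int kWideFlag = 0x4000;

  constexpr bool IsWide(int opcode) { return (opcode & kWideFlag) != 0; }
  constexpr int  Unwide(int opcode) { return opcode & ~kWideFlag; }
  constexpr int  Widen(int opcode)  { return opcode | kWideFlag; }

  int main() {
    constexpr int kLdrBase = 42;           // pretend table index for "ldr"
    int wide_ldr = Widen(kLdrBase);
    assert(IsWide(wide_ldr));
    assert(Unwide(wide_ldr) == kLdrBase);  // same table entry as the 32-bit form
    return 0;
  }
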
target_arm64.cc:168 void Arm64Mir2Lir::SetupTargetResourceMasks(LIR* lir, uint64_t flags, argument
171 DCHECK(!lir->flags.use_def_invalid);
340 std::string Arm64Mir2Lir::BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) { argument
357 operand = lir->operands[nc-'0'];
362 int omittable = ((IS_WIDE(lir->opcode)) ? EncodeExtend(kA64Uxtw, 0) :
412 snprintf(tbuf, arraysize(tbuf), "%c%d", (IS_WIDE(lir->opcode)) ? 'd' : 's',
416 bool is_wide = IS_WIDE(lir->opcode);
458 snprintf(tbuf, arraysize(tbuf), "%d", operand*((IS_WIDE(lir->opcode)) ? 8 : 4));
470 strcpy(tbuf, (IS_WIDE(lir->opcode)) ? ", lsl #3" : ", lsl #2");
477 reinterpret_cast<uintptr_t>(base_addr) + lir
592 IsUnconditionalBranch(LIR* lir) argument
[all...]
codegen_arm64.h:116 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
117 void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
121 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) OVERRIDE;
124 size_t GetInsnSize(LIR* lir) OVERRIDE;
125 bool IsUnconditionalBranch(LIR* lir) OVERRIDE;
241 size_t GetInstructionOffset(LIR* lir) OVERRIDE;
350 size_t GetLoadStoreSize(LIR* lir);
358 uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
utility_arm64.cc:92 size_t Arm64Mir2Lir::GetLoadStoreSize(LIR* lir) { argument
93 bool opcode_is_wide = IS_WIDE(lir->opcode);
94 A64Opcode opcode = UNWIDE(lir->opcode);
101 size_t Arm64Mir2Lir::GetInstructionOffset(LIR* lir) { argument
102 size_t offset = lir->operands[2];
103 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
107 offset = offset * (1 << GetLoadStoreSize(lir));
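
Arm64Mir2Lir::GetInstructionOffset shows that the load/store offset operand is kept in scaled units and has to be multiplied by the access size to get bytes: offset * (1 << GetLoadStoreSize(lir)), so operand 5 on an 8-byte access means byte offset 40. A tiny sketch of that unscaling, with an assumed size_log2 field standing in for GetLoadStoreSize and a plain bool for the scaled-offset flag check elided in the snippet:

  #include <cassert>
  #include <cstddef>

  struct MemInsn {
    int scaled_offset;  // operands[2] in the snippet: offset in units of the access size
    int size_log2;      // 0/1/2/3 for 1/2/4/8-byte accesses (assumed field)
    bool scaled;        // some encodings carry an unscaled byte offset instead
  };

  size_t ByteOffset(const MemInsn& insn) {
    size_t offset = static_cast<size_t>(insn.scaled_offset);
    if (insn.scaled) {
      offset *= (size_t{1} << insn.size_log2);  // convert access-size units to bytes
    }
    return offset;
  }

  int main() {
    assert(ByteOffset({5, 3, true}) == 40);    // 8-byte access, slot 5
    assert(ByteOffset({12, 0, false}) == 12);  // already a byte offset
    return 0;
  }
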
int_arm64.cc:942 LIR* lir = NewLIR2(kA64Ldr2rp, As32BitReg(reg).GetReg(), 0); local
943 lir->target = target;

Completed in 606 milliseconds