Searched defs:LoadMI (Results 1 - 12 of 12) sorted by relevance

/external/llvm/lib/CodeGen/
ImplicitNullChecks.cpp 113 MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
480 /// Wrap a machine load instruction, LoadMI, into a FAULTING_LOAD_OP machine
481 /// instruction. The FAULTING_LOAD_OP instruction does the same load as LoadMI
485 ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI, argument
492 unsigned NumDefs = LoadMI->getDesc().getNumDefs();
497 DefReg = LoadMI->defs().begin()->getReg();
498 assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
504 .addImm(LoadMI->getOpcode());
506 for (auto &MO : LoadMI
[all...]
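
A minimal sketch of the wrapping described in the comment above, assuming the 3.9-era LLVM API these files come from: the FAULTING_LOAD_OP pseudo takes the load's def register, the fault handler block, the original opcode as an immediate, and then the load's use operands and memory operands. HandlerMBB and TII are assumptions supplied by the surrounding pass; this is not the verbatim upstream function.

#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetOpcodes.h"
using namespace llvm;

// Sketch only: wrap LoadMI into a FAULTING_LOAD_OP appended to MBB.
static MachineInstr *wrapFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
                                      MachineBasicBlock *HandlerMBB,
                                      const TargetInstrInfo *TII) {
  unsigned NumDefs = LoadMI->getDesc().getNumDefs();
  assert(NumDefs <= 1 && "other cases unhandled");
  unsigned DefReg = NumDefs ? LoadMI->defs().begin()->getReg() : 0;

  DebugLoc DL; // the pseudo carries no source location of its own
  auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                 .addMBB(HandlerMBB)           // where control goes on a fault
                 .addImm(LoadMI->getOpcode()); // remember the wrapped opcode

  // Copy the original load's use operands and memory operands (cf. line 506).
  for (auto &MO : LoadMI->uses())
    MIB.addOperand(MO);
  MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
  return MIB;
}
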
InlineSpiller.cpp 203 MachineInstr *LoadMI = nullptr);
724 /// @param LoadMI Load instruction to use instead of stack slot when non-null.
728 MachineInstr *LoadMI) {
758 if (LoadMI && MO.isDef())
768 LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)
727 foldMemoryOperand(ArrayRef<std::pair<MachineInstr*, unsigned> > Ops, MachineInstr *LoadMI) argument
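
The comment at line 724 is the key contract: when LoadMI is non-null, the fold targets that load instead of the spill slot, and a def operand then rules the fold out. A simplified, hypothetical sketch of that dispatch, assuming the 3.9-era API (the real foldMemoryOperand at line 728 also filters implicit, sub-register, and tied operands):

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/Target/TargetInstrInfo.h"
#include <utility>
using namespace llvm;

static MachineInstr *
tryFoldSketch(MachineInstr *MI, ArrayRef<std::pair<MachineInstr *, unsigned>> Ops,
              MachineInstr *LoadMI, int StackSlot, const TargetInstrInfo &TII,
              LiveIntervals &LIS) {
  SmallVector<unsigned, 8> FoldOps;
  for (const auto &OpPair : Ops) {
    MachineOperand &MO = OpPair.first->getOperand(OpPair.second);
    // A load cannot be folded into an operand that is written (cf. line 758).
    if (LoadMI && MO.isDef())
      return nullptr;
    FoldOps.push_back(OpPair.second);
  }
  // Fold against the explicit load when given, otherwise against the spill
  // slot: the same two overloads selected at line 768.
  return LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)
                : TII.foldMemoryOperand(*MI, FoldOps, StackSlot, &LIS);
}
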
TargetInstrInfo.cpp 780 MachineInstr &LoadMI,
782 assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
796 isLoadFromStackSlot(LoadMI, FrameIndex)) {
803 NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
810 NewMI->setMemRefs(LoadMI.memoperands_begin(), LoadMI.memoperands_end());
815 for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),
816 E = LoadMI.memoperands_end();
778 foldMemoryOperand(MachineInstr &MI, ArrayRef<unsigned> Ops, MachineInstr &LoadMI, LiveIntervals *LIS) const argument
/external/llvm/lib/Target/AArch64/
AArch64LoadStoreOptimizer.cpp 1103 MachineInstr &LoadMI = *I; local
1104 unsigned BaseReg = getLdStBaseOp(LoadMI).getReg();
1129 if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&
1131 isLdOffsetInRangeOfSt(LoadMI, MI, TII) &&
1149 if (MI.mayStore() && mayAlias(LoadMI, MI, TII))
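
The AArch64 result above is a store-to-load forwarding check: starting from LoadMI, the pass scans for an earlier store to the same base register whose stored bytes cover the load, and gives up if an intervening store may alias it. A rough, hypothetical reconstruction using the file-local helpers named in the snippet (the real pass also tracks modified registers and handles the scan limit differently):

// isMatchingStore, getLdStBaseOp, isLdOffsetInRangeOfSt and mayAlias are the
// static helpers in AArch64LoadStoreOptimizer.cpp that the snippet calls; this
// loop only shows how they combine.
static MachineBasicBlock::iterator
findForwardingStoreSketch(MachineBasicBlock::iterator I, unsigned Limit,
                          const TargetInstrInfo *TII) {
  MachineInstr &LoadMI = *I;
  unsigned BaseReg = getLdStBaseOp(LoadMI).getReg();
  MachineBasicBlock::iterator B = I->getParent()->begin();
  MachineBasicBlock::iterator E = I->getParent()->end();
  MachineBasicBlock::iterator MBBI = I;

  for (unsigned Count = 0; MBBI != B && Count < Limit; ++Count) {
    --MBBI;
    MachineInstr &MI = *MBBI;
    // A store to the same base whose stored range covers the loaded range can
    // forward its data to LoadMI (cf. lines 1129-1131).
    if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&
        BaseReg == getLdStBaseOp(MI).getReg() &&
        isLdOffsetInRangeOfSt(LoadMI, MI, TII))
      return MBBI;
    // Any other store that may alias the load blocks the optimization (line 1149).
    if (MI.mayStore() && mayAlias(LoadMI, MI, TII))
      return E;
  }
  return E;
}
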
AArch64FastISel.cpp 4371 const auto *LoadMI = MI; local
4372 if (LoadMI->getOpcode() == TargetOpcode::COPY &&
4373 LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
4375 LoadMI = MRI.getUniqueVRegDef(LoadReg);
4376 assert(LoadMI && "Expected valid instruction");
4378 if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
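
The AArch64FastISel lines fit together as follows (joined here for readability; MRI, IsZExt, and the isZExtLoad/isSExtLoad helpers are whatever the surrounding function provides): a value arriving through a COPY of the 32-bit sub-register is traced back to its unique defining instruction, and the fold only proceeds if that definition is already a load with the requested extension kind.

const MachineInstr *LoadMI = MI;
if (LoadMI->getOpcode() == TargetOpcode::COPY &&
    LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
  // Look through the sub-register copy to the instruction defining the full
  // virtual register.
  unsigned LoadReg = MI->getOperand(1).getReg();
  LoadMI = MRI.getUniqueVRegDef(LoadReg);
  assert(LoadMI && "Expected valid instruction");
}
// Only fold if the defining load already performs the extension we need.
if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
  return false;
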
/external/llvm/lib/Target/X86/
X86MCInstLower.cpp 908 MCInst LoadMI; local
909 LoadMI.setOpcode(LoadOpcode);
912 LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));
918 LoadMI.addOperand(MaybeOperand.getValue());
920 OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());
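
The X86MCInstLower result shows the MC-level pattern rather than a MachineInstr one: an MCInst is assembled by hand and handed to the streamer. A small reconstruction, where LoadOpcode and LoadDefRegister stand in for values the surrounding lowering code computes:

MCInst LoadMI;
LoadMI.setOpcode(LoadOpcode);                              // opcode chosen by the caller
LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));  // destination register
// ... the lowered memory operands are appended here, one addOperand() per
//     machine operand that lowers to an MCOperand (cf. line 918) ...
OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());
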
X86FastISel.cpp 778 MachineInstrBuilder LoadMI = local
780 addFullAddress(LoadMI, StubAM);
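
In the X86FastISel result, the load is built with BuildMI and then addFullAddress (from X86InstrBuilder.h) appends the full x86 memory reference described by StubAM. A sketch under the assumption that Opc, LoadReg, and StubAM were already computed by the caller:

MachineInstrBuilder LoadMI =
    BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(Opc), LoadReg);
// addFullAddress appends base register, scale, index register, displacement
// and segment from the X86AddressMode in StubAM.
addFullAddress(LoadMI, StubAM);
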
X86InstrInfo.cpp 6151 /// Check if \p LoadMI is a partial register load that we can't fold into \p MI
6165 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, argument
6168 unsigned Opc = LoadMI.getOpcode();
6171 MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg())->getSize();
6220 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
6223 unsigned NumOps = LoadMI.getDesc().getNumOperands();
6225 if (isLoadFromStackSlot(LoadMI, FrameIndex)) {
6226 if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))
6240 if (LoadMI.hasOneMemOperand())
6241 Alignment = (*LoadMI
6218 foldMemoryOperandImpl( MachineFunction &MF, MachineInstr &MI, ArrayRef<unsigned> Ops, MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, LiveIntervals *LIS) const argument
[all...]
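
Two guards stand out in the X86InstrInfo result, paraphrased below from the visible lines (everything else in foldMemoryOperandImpl is elided): a stack-slot reload that defines only part of the register its user reads must not be folded, and for other loads the required alignment is taken from the load's single memory operand.

int FrameIndex;
if (isLoadFromStackSlot(LoadMI, FrameIndex)) {
  // A load narrower than the register class its user reads (the size check at
  // line 6171) would leave the upper part undefined, so refuse to fold it.
  if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))
    return nullptr;
  // ... otherwise folding proceeds against the frame index ...
}

// For loads that are not spill reloads, alignment comes from the memoperand.
unsigned Alignment = 0;
if (LoadMI.hasOneMemOperand())
  Alignment = (*LoadMI.memoperands_begin())->getAlignment();
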
/external/swiftshader/third_party/LLVM/lib/CodeGen/
InlineSpiller.cpp 178 MachineInstr *LoadMI = 0);
1014 /// @param LoadMI Load instruction to use instead of stack slot when non-null.
1018 MachineInstr *LoadMI) {
1032 if (LoadMI && MO.isDef())
1040 LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)
1045 if (!LoadMI)
1016 foldMemoryOperand(MachineBasicBlock::iterator MI, const SmallVectorImpl<unsigned> &Ops, MachineInstr *LoadMI) argument
/external/llvm/include/llvm/Target/
TargetInstrInfo.h 827 MachineInstr &LoadMI,
920 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
918 foldMemoryOperandImpl( MachineFunction &MF, MachineInstr &MI, ArrayRef<unsigned> Ops, MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, LiveIntervals *LIS = nullptr) const argument
/external/llvm/lib/Target/SystemZ/
SystemZInstrInfo.cpp 1029 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
1027 foldMemoryOperandImpl( MachineFunction &MF, MachineInstr &MI, ArrayRef<unsigned> Ops, MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, LiveIntervals *LIS) const argument
/external/swiftshader/third_party/LLVM/lib/Target/X86/
X86FastISel.cpp 548 MachineInstrBuilder LoadMI = local
550 addFullAddress(LoadMI, StubAM);

Completed in 338 milliseconds