LiveRangeEdit.cpp revision 1edc3cf65d54130542fc91bac67ecf616ef88d48
//===--- LiveRangeEdit.cpp - Basic tools for editing a register live range --===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// The LiveRangeEdit class represents changes done to a virtual register when it
// is spilled or split.
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "LiveRangeEdit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

/// createFrom - Create a new virtual register with the same register class as
/// OldReg, record it in the VirtRegMap as split from OldReg's original
/// register, and append its (empty) live interval to newRegs_.
LiveInterval &LiveRangeEdit::createFrom(unsigned OldReg,
                                        LiveIntervals &LIS,
                                        VirtRegMap &VRM) {
  MachineRegisterInfo &MRI = VRM.getRegInfo();
  unsigned VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  // Grow the VirtRegMap so the newly created register has a slot, and track
  // its provenance through getOriginal so chains of splits stay attributed
  // to the first register.
  VRM.grow();
  VRM.setIsSplitFromReg(VReg, VRM.getOriginal(OldReg));
  LiveInterval &LI = LIS.getOrCreateInterval(VReg);
  newRegs_.push_back(&LI);
  return LI;
}

/// checkRematerializable - Record VNI in remattable_ if its defining
/// instruction DefMI is trivially rematerializable according to the target.
/// Also marks the scan as done so anyRematerializable won't rescan.
void LiveRangeEdit::checkRematerializable(VNInfo *VNI,
                                          const MachineInstr *DefMI,
                                          const TargetInstrInfo &tii,
                                          AliasAnalysis *aa) {
  assert(DefMI && "Missing instruction");
  if (tii.isTriviallyReMaterializable(DefMI, aa))
    remattable_.insert(VNI);
  scannedRemattable_ = true;
}

/// scanRemattable - Scan every value number in the parent interval and record
/// the ones whose defining instruction can be rematerialized.
void LiveRangeEdit::scanRemattable(LiveIntervals &lis,
                                   const TargetInstrInfo &tii,
                                   AliasAnalysis *aa) {
  for (LiveInterval::vni_iterator I = parent_.vni_begin(),
       E = parent_.vni_end(); I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    // Values without a defining instruction (e.g. PHI defs) cannot be
    // rematerialized; skip them.
    MachineInstr *DefMI = lis.getInstructionFromIndex(VNI->def);
    if (!DefMI)
      continue;
    checkRematerializable(VNI, DefMI, tii, aa);
  }
}

/// anyRematerializable - Return true if any value of the parent interval is
/// rematerializable, scanning lazily on first call.
bool LiveRangeEdit::anyRematerializable(LiveIntervals &lis,
                                        const TargetInstrInfo &tii,
                                        AliasAnalysis *aa) {
  if (!scannedRemattable_)
    scanRemattable(lis, tii, aa);
  return !remattable_.empty();
}

/// allUsesAvailableAt - Return true if all registers used by OrigMI at
/// OrigIdx are also available with the same value at UseIdx.
bool LiveRangeEdit::allUsesAvailableAt(const MachineInstr *OrigMI,
                                       SlotIndex OrigIdx,
                                       SlotIndex UseIdx,
                                       LiveIntervals &lis) {
  // Compare values at the use slots of both positions.
  OrigIdx = OrigIdx.getUseIndex();
  UseIdx = UseIdx.getUseIndex();
  for (unsigned i = 0, e = OrigMI->getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = OrigMI->getOperand(i);
    // Only register uses matter for availability.
    if (!MO.isReg() || !MO.getReg() || MO.isDef())
      continue;
    // Reserved registers are OK.
    if (MO.isUndef() || !lis.hasInterval(MO.getReg()))
      continue;
    // We cannot depend on virtual registers in uselessRegs_.
    if (uselessRegs_)
      for (unsigned ui = 0, ue = uselessRegs_->size(); ui != ue; ++ui)
        if ((*uselessRegs_)[ui]->reg == MO.getReg())
          return false;

    // The use must see the same value number at both indexes, otherwise the
    // operand would read a different definition after rematerialization.
    LiveInterval &li = lis.getInterval(MO.getReg());
    const VNInfo *OVNI = li.getVNInfoAt(OrigIdx);
    if (!OVNI)
      continue;
    if (OVNI != li.getVNInfoAt(UseIdx))
      return false;
  }
  return true;
}

/// canRematerializeAt - Determine if ParentVNI can be rematerialized at
/// UseIdx. Fills in RM.OrigMI (the instruction to copy) when it was not
/// provided by the caller. Requires anyRematerializable to have been called.
bool LiveRangeEdit::canRematerializeAt(Remat &RM,
                                       SlotIndex UseIdx,
                                       bool cheapAsAMove,
                                       LiveIntervals &lis) {
  assert(scannedRemattable_ && "Call anyRematerializable first");

  // Use scanRemattable info.
  if (!remattable_.count(RM.ParentVNI))
    return false;

  // No defining instruction provided.
  SlotIndex DefIdx;
  if (RM.OrigMI)
    DefIdx = lis.getInstructionIndex(RM.OrigMI);
  else {
    DefIdx = RM.ParentVNI->def;
    RM.OrigMI = lis.getInstructionFromIndex(DefIdx);
    assert(RM.OrigMI && "No defining instruction for remattable value");
  }

  // If only cheap remats were requested, bail out early.
  if (cheapAsAMove && !RM.OrigMI->getDesc().isAsCheapAsAMove())
    return false;

  // Verify that all used registers are available with the same values.
  if (!allUsesAvailableAt(RM.OrigMI, DefIdx, UseIdx, lis))
    return false;

  return true;
}

/// rematerializeAt - Insert a copy of RM.OrigMI defining DestReg before MI in
/// MBB, record the remat, and return the SlotIndex of the new instruction's
/// def slot.
SlotIndex LiveRangeEdit::rematerializeAt(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MI,
                                         unsigned DestReg,
                                         const Remat &RM,
                                         LiveIntervals &lis,
                                         const TargetInstrInfo &tii,
                                         const TargetRegisterInfo &tri) {
  assert(RM.OrigMI && "Invalid remat");
  tii.reMaterialize(MBB, MI, DestReg, 0, RM.OrigMI, tri);
  rematted_.insert(RM.ParentVNI);
  // --MI now points at the instruction reMaterialize inserted before MI.
  return lis.InsertMachineInstrInMaps(--MI).getDefIndex();
}

/// eraseVirtReg - Remove Reg's live interval, but only if the delegate (if
/// any) agrees the register can be erased.
void LiveRangeEdit::eraseVirtReg(unsigned Reg, LiveIntervals &LIS) {
  if (delegate_ && delegate_->LRE_CanEraseVirtReg(Reg))
    LIS.removeInterval(Reg);
}

/// foldAsLoad - If LI has exactly one def (which canFoldAsLoad) and exactly
/// one use, try to fold the def into the use via the target's
/// foldMemoryOperand hook. On success the use is replaced by the folded
/// instruction, the def becomes dead and is pushed onto Dead; returns true.
bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
                               SmallVectorImpl<MachineInstr*> &Dead,
                               MachineRegisterInfo &MRI,
                               LiveIntervals &LIS,
                               const TargetInstrInfo &TII) {
  MachineInstr *DefMI = 0, *UseMI = 0;

  // Check that there is a single def and a single use.
  for (MachineRegisterInfo::reg_nodbg_iterator I = MRI.reg_nodbg_begin(LI->reg),
       E = MRI.reg_nodbg_end(); I != E; ++I) {
    MachineOperand &MO = I.getOperand();
    MachineInstr *MI = MO.getParent();
    if (MO.isDef()) {
      if (DefMI && DefMI != MI)
        return false;
      if (!MI->getDesc().canFoldAsLoad())
        return false;
      DefMI = MI;
    } else if (!MO.isUndef()) {
      if (UseMI && UseMI != MI)
        return false;
      // FIXME: Targets don't know how to fold subreg uses.
      if (MO.getSubReg())
        return false;
      UseMI = MI;
    }
  }
  if (!DefMI || !UseMI)
    return false;

  DEBUG(dbgs() << "Try to fold single def: " << *DefMI
               << " into single use: " << *UseMI);

  // Collect the operand indices reading LI->reg; bail out if the use also
  // writes the register (a tied read-write operand cannot be folded).
  SmallVector<unsigned, 8> Ops;
  if (UseMI->readsWritesVirtualRegister(LI->reg, &Ops).second)
    return false;

  MachineInstr *FoldMI = TII.foldMemoryOperand(UseMI, Ops, DefMI);
  if (!FoldMI)
    return false;
  DEBUG(dbgs() << " folded: " << *FoldMI);
  LIS.ReplaceMachineInstrInMaps(UseMI, FoldMI);
  UseMI->eraseFromParent();
  // The def no longer has a reader; mark its def of LI->reg dead so the
  // caller's dead-def elimination can delete it.
  DefMI->addRegisterDead(LI->reg, 0);
  Dead.push_back(DefMI);
  return true;
}

/// eliminateDeadDefs - Delete the instructions in Dead (all of whose defs
/// must be dead), shrink the live intervals of registers they touched, and
/// iterate until no more dead defs appear. Intervals separated by shrinking
/// are split into connected components with fresh virtual registers.
void LiveRangeEdit::eliminateDeadDefs(SmallVectorImpl<MachineInstr*> &Dead,
                                      LiveIntervals &LIS, VirtRegMap &VRM,
                                      const TargetInstrInfo &TII) {
  // SetVector: deterministic worklist order with O(1) duplicate rejection.
  SetVector<LiveInterval*,
            SmallVector<LiveInterval*, 8>,
            SmallPtrSet<LiveInterval*, 8> > ToShrink;
  MachineRegisterInfo &MRI = VRM.getRegInfo();

  for (;;) {
    // Erase all dead defs.
    while (!Dead.empty()) {
      MachineInstr *MI = Dead.pop_back_val();
      assert(MI->allDefsAreDead() && "Def isn't really dead");
      SlotIndex Idx = LIS.getInstructionIndex(MI).getDefIndex();

      // Never delete inline asm.
      if (MI->isInlineAsm()) {
        DEBUG(dbgs() << "Won't delete: " << Idx << '\t' << *MI);
        continue;
      }

      // Use the same criteria as DeadMachineInstructionElim.
      bool SawStore = false;
      if (!MI->isSafeToMove(&TII, 0, SawStore)) {
        DEBUG(dbgs() << "Can't delete: " << Idx << '\t' << *MI);
        continue;
      }

      DEBUG(dbgs() << "Deleting dead def " << Idx << '\t' << *MI);

      // Check for live intervals that may shrink
      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
             MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        if (!MOI->isReg())
          continue;
        unsigned Reg = MOI->getReg();
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          continue;
        LiveInterval &LI = LIS.getInterval(Reg);

        // Shrink read registers, unless it is likely to be expensive and
        // unlikely to change anything. We typically don't want to shrink the
        // PIC base register that has lots of uses everywhere.
        // Always shrink COPY uses that probably come from live range splitting.
        if (MI->readsVirtualRegister(Reg) &&
            (MI->isCopy() || MOI->isDef() || MRI.hasOneNonDBGUse(Reg) ||
             LI.killedAt(Idx)))
          ToShrink.insert(&LI);

        // Remove defined value.
        if (MOI->isDef()) {
          if (VNInfo *VNI = LI.getVNInfoAt(Idx)) {
            if (delegate_)
              delegate_->LRE_WillShrinkVirtReg(LI.reg);
            LI.removeValNo(VNI);
            // If removing this value emptied the interval, it must not stay
            // on the shrink worklist, and the register itself may be erased.
            if (LI.empty()) {
              ToShrink.remove(&LI);
              eraseVirtReg(Reg, LIS);
            }
          }
        }
      }

      if (delegate_)
        delegate_->LRE_WillEraseInstruction(MI);
      LIS.RemoveMachineInstrFromMaps(MI);
      MI->eraseFromParent();
    }

    if (ToShrink.empty())
      break;

    // Shrink just one live interval. Then delete new dead defs.
    LiveInterval *LI = ToShrink.back();
    ToShrink.pop_back();
    // Try folding a single-def/single-use interval into its use first; that
    // queues the def on Dead and we loop back to erase it.
    if (foldAsLoad(LI, Dead, MRI, LIS, TII))
      continue;
    if (delegate_)
      delegate_->LRE_WillShrinkVirtReg(LI->reg);
    // shrinkToUses returns true when the interval may have broken into
    // disconnected pieces; newly dead defs are appended to Dead.
    if (!LIS.shrinkToUses(LI, &Dead))
      continue;

    // LI may have been separated, create new intervals.
    LI->RenumberValues(LIS);
    ConnectedVNInfoEqClasses ConEQ(LIS);
    unsigned NumComp = ConEQ.Classify(LI);
    if (NumComp <= 1)
      continue;
    DEBUG(dbgs() << NumComp << " components: " << *LI << '\n');
    // Component 0 keeps LI; each additional component gets a fresh register
    // cloned from LI's original.
    SmallVector<LiveInterval*, 8> Dups(1, LI);
    for (unsigned i = 1; i != NumComp; ++i) {
      Dups.push_back(&createFrom(LI->reg, LIS, VRM));
      if (delegate_)
        delegate_->LRE_DidCloneVirtReg(Dups.back()->reg, LI->reg);
    }
    ConEQ.Distribute(&Dups[0], MRI);
  }
}

/// calculateRegClassAndHint - Recompute register class, spill weight, and
/// allocation hint for every register created by this edit.
void LiveRangeEdit::calculateRegClassAndHint(MachineFunction &MF,
                                             LiveIntervals &LIS,
                                             const MachineLoopInfo &Loops) {
  VirtRegAuxInfo VRAI(MF, LIS, Loops);
  for (iterator I = begin(), E = end(); I != E; ++I) {
    LiveInterval &LI = **I;
    VRAI.CalculateRegClass(LI.reg);
    VRAI.CalculateWeightAndHint(LI);
  }
}