LiveIntervalAnalysis.cpp revision 2aeef00d681bf4010d234949043fe0beb29547e2
//===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveInterval analysis pass which is used
// by the Linear Scan Register allocator. This pass linearizes the
// basic blocks of the function in DFS order and uses the
// LiveVariables pass to conservatively compute live intervals for
// each virtual and physical register.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "LiveRangeCalc.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/BlockFrequency.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include <algorithm>
#include <cmath>
#include <limits>
using namespace llvm;

char LiveIntervals::ID = 0;
char &llvm::LiveIntervalsID = LiveIntervals::ID;
INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
                "Live Interval Analysis", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(LiveVariables)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
                "Live Interval Analysis", false, false)

#ifndef NDEBUG
static cl::opt<bool> EnablePrecomputePhysRegs(
  "precompute-phys-liveness", cl::Hidden,
  cl::desc("Eagerly compute live intervals for all physreg units."));
#else
static bool EnablePrecomputePhysRegs = false;
#endif // NDEBUG

void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  // LiveVariables isn't really required by this analysis; it is only required
  // here to make sure it is live during TwoAddressInstructionPass and
  // PHIElimination. This is temporary.
  AU.addRequired<LiveVariables>();
  AU.addPreserved<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addRequiredTransitiveID(MachineDominatorsID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreserved<SlotIndexes>();
  AU.addRequiredTransitive<SlotIndexes>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

LiveIntervals::LiveIntervals() : MachineFunctionPass(ID),
  DomTree(0), LRCalc(0) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
}

LiveIntervals::~LiveIntervals() {
  delete LRCalc;
}

void LiveIntervals::releaseMemory() {
  // Free the live intervals themselves.
  for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
    delete VirtRegIntervals[TargetRegisterInfo::index2VirtReg(i)];
  VirtRegIntervals.clear();
  RegMaskSlots.clear();
  RegMaskBits.clear();
  RegMaskBlocks.clear();

  for (unsigned i = 0, e = RegUnitIntervals.size(); i != e; ++i)
    delete RegUnitIntervals[i];
  RegUnitIntervals.clear();

  // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
  VNInfoAllocator.Reset();
}

/// runOnMachineFunction - calculates LiveIntervals
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  MRI = &MF->getRegInfo();
  TM = &fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  AA = &getAnalysis<AliasAnalysis>();
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  if (!LRCalc)
    LRCalc = new LiveRangeCalc();

  // Allocate space for all virtual registers.
  VirtRegIntervals.resize(MRI->getNumVirtRegs());

  computeVirtRegs();
  computeRegMasks();
  computeLiveInRegUnits();

  if (EnablePrecomputePhysRegs) {
    // For stress testing, precompute live ranges of all physical register
    // units, including reserved registers.
    for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
      getRegUnit(i);
  }
  DEBUG(dump());
  return true;
}

/// print - Implement the dump method.
void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
  OS << "********** INTERVALS **********\n";

  // Dump the regunits.
  for (unsigned i = 0, e = RegUnitIntervals.size(); i != e; ++i)
    if (LiveInterval *LI = RegUnitIntervals[i])
      OS << PrintRegUnit(i, TRI) << " = " << *LI << '\n';

  // Dump the virtregs.
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (hasInterval(Reg))
      OS << PrintReg(Reg) << " = " << getInterval(Reg) << '\n';
  }

  OS << "RegMasks:";
  for (unsigned i = 0, e = RegMaskSlots.size(); i != e; ++i)
    OS << ' ' << RegMaskSlots[i];
  OS << '\n';

  printInstrs(OS);
}

void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  MF->print(OS, Indexes);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
#endif

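/// createInterval - Allocate a new LiveInterval for the given register.
/// Physical registers get an infinite spill weight; virtual registers start
/// with a weight of zero.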
LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ? HUGE_VALF : 0.0F;
  return new LiveInterval(reg, Weight);
}


/// computeVirtRegInterval - Compute the live interval of a virtual register,
/// based on defs and uses.
void LiveIntervals::computeVirtRegInterval(LiveInterval *LI) {
  assert(LRCalc && "LRCalc not initialized.");
  assert(LI->empty() && "Should only compute empty intervals.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  LRCalc->createDeadDefs(LI);
  LRCalc->extendToUses(LI);
}

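/// computeVirtRegs - Create and compute a live interval for every virtual
/// register that has at least one non-debug use or def.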
void LiveIntervals::computeVirtRegs() {
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (MRI->reg_nodbg_empty(Reg))
      continue;
    LiveInterval *LI = createInterval(Reg);
    VirtRegIntervals[Reg] = LI;
    computeVirtRegInterval(LI);
  }
}

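/// computeRegMasks - Record the slot index and clobber mask of every regmask
/// operand in the function. RegMaskBlocks remembers which slice of
/// RegMaskSlots/RegMaskBits belongs to each basic block.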
void LiveIntervals::computeRegMasks() {
  RegMaskBlocks.resize(MF->getNumBlockIDs());

  // Find all instructions with regmask operands.
  for (MachineFunction::iterator MBBI = MF->begin(), E = MF->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB->getNumber()];
    RMB.first = RegMaskSlots.size();
    for (MachineBasicBlock::iterator MI = MBB->begin(), ME = MBB->end();
         MI != ME; ++MI)
      for (MIOperands MO(MI); MO.isValid(); ++MO) {
        if (!MO->isRegMask())
          continue;
        RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
        RegMaskBits.push_back(MO->getRegMask());
      }
    // Compute the number of register mask instructions in this block.
    RMB.second = RegMaskSlots.size() - RMB.first;
  }
}

//===----------------------------------------------------------------------===//
//                           Register Unit Liveness
//===----------------------------------------------------------------------===//
//
// Fixed interference typically comes from ABI boundaries: Function arguments
// and return values are passed in fixed registers, and so are exception
// pointers entering landing pads. Certain instructions require values to be
// present in specific registers. That is also represented through fixed
// interference.
//

/// computeRegUnitInterval - Compute the live interval of a register unit, based
/// on the uses and defs of aliasing registers.  The interval should be empty,
/// or contain only dead phi-defs from ABI blocks.
void LiveIntervals::computeRegUnitInterval(LiveInterval *LI) {
  unsigned Unit = LI->reg;

  assert(LRCalc && "LRCalc not initialized.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());

  // The physregs aliasing Unit are the roots and their super-registers.
  // Create all values as dead defs before extending to uses. Note that roots
  // may share super-registers. That's OK because createDeadDefs() is
  // idempotent. It is very rare for a register unit to have multiple roots, so
  // uniquing super-registers is probably not worthwhile.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      if (!MRI->reg_empty(*Supers))
        LRCalc->createDeadDefs(LI, *Supers);
    }
  }

  // Now extend LI to reach all uses.
  // Ignore uses of reserved registers. We only track defs of those.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      unsigned Reg = *Supers;
      if (!MRI->isReserved(Reg) && !MRI->reg_empty(Reg))
        LRCalc->extendToUses(LI, Reg);
    }
  }
}


/// computeLiveInRegUnits - Precompute the live ranges of any register units
/// that are live-in to an ABI block somewhere. Register values can appear
/// without a corresponding def when entering the entry block or a landing pad.
///
void LiveIntervals::computeLiveInRegUnits() {
  RegUnitIntervals.resize(TRI->getNumRegUnits());
  DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");

  // Keep track of the intervals allocated.
  SmallVector<LiveInterval*, 8> NewIntvs;

  // Check all basic blocks for live-ins.
  for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
       MFI != MFE; ++MFI) {
    const MachineBasicBlock *MBB = MFI;

    // We only care about ABI blocks: Entry + landing pads.
    if ((MFI != MF->begin() && !MBB->isLandingPad()) || MBB->livein_empty())
      continue;

    // Create phi-defs at Begin for all live-in registers.
    SlotIndex Begin = Indexes->getMBBStartIdx(MBB);
    DEBUG(dbgs() << Begin << "\tBB#" << MBB->getNumber());
    for (MachineBasicBlock::livein_iterator LII = MBB->livein_begin(),
         LIE = MBB->livein_end(); LII != LIE; ++LII) {
      for (MCRegUnitIterator Units(*LII, TRI); Units.isValid(); ++Units) {
        unsigned Unit = *Units;
        LiveInterval *Intv = RegUnitIntervals[Unit];
        if (!Intv) {
          Intv = RegUnitIntervals[Unit] = new LiveInterval(Unit, HUGE_VALF);
          NewIntvs.push_back(Intv);
        }
        VNInfo *VNI = Intv->createDeadDef(Begin, getVNInfoAllocator());
        (void)VNI;
        DEBUG(dbgs() << ' ' << PrintRegUnit(Unit, TRI) << '#' << VNI->id);
      }
    }
    DEBUG(dbgs() << '\n');
  }
  DEBUG(dbgs() << "Created " << NewIntvs.size() << " new intervals.\n");

  // Compute the 'normal' part of the intervals.
  for (unsigned i = 0, e = NewIntvs.size(); i != e; ++i)
    computeRegUnitInterval(NewIntvs[i]);
}


/// shrinkToUses - After removing some uses of a register, shrink its live
/// range to just the remaining uses. This method does not compute reaching
/// defs for new uses, and it doesn't remove dead defs.
bool LiveIntervals::shrinkToUses(LiveInterval *li,
                                 SmallVectorImpl<MachineInstr*> *dead) {
  DEBUG(dbgs() << "Shrink: " << *li << '\n');
  assert(TargetRegisterInfo::isVirtualRegister(li->reg)
         && "Can only shrink virtual registers");
  // Find all the values used, including PHI kills.
  SmallVector<std::pair<SlotIndex, VNInfo*>, 16> WorkList;

  // Blocks that have already been added to WorkList as live-out.
  SmallPtrSet<MachineBasicBlock*, 16> LiveOut;

  // Visit all instructions reading li->reg.
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(li->reg);
       MachineInstr *UseMI = I.skipInstruction();) {
    if (UseMI->isDebugValue() || !UseMI->readsVirtualRegister(li->reg))
      continue;
    SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    LiveRangeQuery LRQ(*li, Idx);
    VNInfo *VNI = LRQ.valueIn();
    if (!VNI) {
      // This shouldn't happen: readsVirtualRegister returns true, but there is
      // no live value. It is likely caused by a target getting <undef> flags
      // wrong.
      DEBUG(dbgs() << Idx << '\t' << *UseMI
                   << "Warning: Instr claims to read non-existent value in "
                   << *li << '\n');
      continue;
    }
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;

    WorkList.push_back(std::make_pair(Idx, VNI));
  }

  // Create a new live interval with only minimal live segments per def.
  LiveInterval NewLI(li->reg, 0);
  for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
       I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    NewLI.addRange(LiveRange(VNI->def, VNI->def.getDeadSlot(), VNI));
  }

  // Keep track of the PHIs that are in use.
  SmallPtrSet<VNInfo*, 8> UsedPHIs;

  // Extend intervals to reach all uses in WorkList.
  while (!WorkList.empty()) {
    SlotIndex Idx = WorkList.back().first;
    VNInfo *VNI = WorkList.back().second;
    WorkList.pop_back();
    const MachineBasicBlock *MBB = getMBBFromIndex(Idx.getPrevSlot());
    SlotIndex BlockStart = getMBBStartIdx(MBB);

    // Extend the live range for VNI to be live at Idx.
    if (VNInfo *ExtVNI = NewLI.extendInBlock(BlockStart, Idx)) {
      (void)ExtVNI;
      assert(ExtVNI == VNI && "Unexpected existing value number");
      // Is this a PHIDef we haven't seen before?
      if (!VNI->isPHIDef() || VNI->def != BlockStart || !UsedPHIs.insert(VNI))
        continue;
      // The PHI is live, make sure the predecessors are live-out.
      for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
           PE = MBB->pred_end(); PI != PE; ++PI) {
        if (!LiveOut.insert(*PI))
          continue;
        SlotIndex Stop = getMBBEndIdx(*PI);
        // A predecessor is not required to have a live-out value for a PHI.
        if (VNInfo *PVNI = li->getVNInfoBefore(Stop))
          WorkList.push_back(std::make_pair(Stop, PVNI));
      }
      continue;
    }

    // VNI is live-in to MBB.
    DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    NewLI.addRange(LiveRange(BlockStart, Idx, VNI));

    // Make sure VNI is live-out from the predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
      if (!LiveOut.insert(*PI))
        continue;
      SlotIndex Stop = getMBBEndIdx(*PI);
      assert(li->getVNInfoBefore(Stop) == VNI &&
             "Wrong value out of predecessor");
      WorkList.push_back(std::make_pair(Stop, VNI));
    }
  }

  // Handle dead values.
  bool CanSeparate = false;
  for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
       I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    LiveInterval::iterator LII = NewLI.FindLiveRangeContaining(VNI->def);
    assert(LII != NewLI.end() && "Missing live range for PHI");
    if (LII->end != VNI->def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      VNI->markUnused();
      NewLI.removeRange(*LII);
      DEBUG(dbgs() << "Dead PHI at " << VNI->def << " may separate interval\n");
      CanSeparate = true;
    } else {
      // This is a dead def. Make sure the instruction knows.
      MachineInstr *MI = getInstructionFromIndex(VNI->def);
      assert(MI && "No instruction defining live value");
      MI->addRegisterDead(li->reg, TRI);
      if (dead && MI->allDefsAreDead()) {
        DEBUG(dbgs() << "All defs dead: " << VNI->def << '\t' << *MI);
        dead->push_back(MI);
      }
    }
  }

  // Move the trimmed ranges back.
  li->ranges.swap(NewLI.ranges);
  DEBUG(dbgs() << "Shrunk: " << *li << '\n');
  return CanSeparate;
}

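/// extendToIndices - Extend LI so that it is live at each of the given slot
/// indexes, using LRCalc to find the reaching definitions.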
void LiveIntervals::extendToIndices(LiveInterval *LI,
                                    ArrayRef<SlotIndex> Indices) {
  assert(LRCalc && "LRCalc not initialized.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  for (unsigned i = 0, e = Indices.size(); i != e; ++i)
    LRCalc->extend(LI, Indices[i]);
}

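/// pruneValue - Prune the live range of the value that is live at Kill,
/// starting at Kill. The removal follows the CFG into every block where that
/// value was live, and the end points of the removed segments are recorded in
/// EndPoints (when provided) so the caller can re-extend the range later.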
void LiveIntervals::pruneValue(LiveInterval *LI, SlotIndex Kill,
                               SmallVectorImpl<SlotIndex> *EndPoints) {
  LiveRangeQuery LRQ(*LI, Kill);
  VNInfo *VNI = LRQ.valueOut();
  if (!VNI)
    return;

  MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
  SlotIndex MBBStart, MBBEnd;
  tie(MBBStart, MBBEnd) = Indexes->getMBBRange(KillMBB);

  // If VNI isn't live out from KillMBB, the value is trivially pruned.
  if (LRQ.endPoint() < MBBEnd) {
    LI->removeRange(Kill, LRQ.endPoint());
    if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    return;
  }

  // VNI is live out of KillMBB.
  LI->removeRange(Kill, MBBEnd);
  if (EndPoints) EndPoints->push_back(MBBEnd);

  // Find all blocks that are reachable from KillMBB without leaving VNI's live
  // range. It is possible that KillMBB itself is reachable, so start a DFS
  // from each successor.
  typedef SmallPtrSet<MachineBasicBlock*, 9> VisitedTy;
  VisitedTy Visited;
  for (MachineBasicBlock::succ_iterator
       SuccI = KillMBB->succ_begin(), SuccE = KillMBB->succ_end();
       SuccI != SuccE; ++SuccI) {
    for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
         I = df_ext_begin(*SuccI, Visited), E = df_ext_end(*SuccI, Visited);
         I != E;) {
      MachineBasicBlock *MBB = *I;

      // Check if VNI is live in to MBB.
      tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
      LiveRangeQuery LRQ(*LI, MBBStart);
      if (LRQ.valueIn() != VNI) {
        // This block isn't part of the VNI live range. Prune the search.
        I.skipChildren();
        continue;
      }

      // Prune the search if VNI is killed in MBB.
      if (LRQ.endPoint() < MBBEnd) {
        LI->removeRange(MBBStart, LRQ.endPoint());
        if (EndPoints) EndPoints->push_back(LRQ.endPoint());
        I.skipChildren();
        continue;
      }

      // VNI is live through MBB.
      LI->removeRange(MBBStart, MBBEnd);
      if (EndPoints) EndPoints->push_back(MBBEnd);
      ++I;
    }
  }
}

//===----------------------------------------------------------------------===//
// Register allocator hooks.
//

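/// addKillFlags - Add a kill flag to every instruction that ends a live
/// segment of a virtual register, unless the regunits of the physical
/// register assigned by VRM are still live at that point, in which case any
/// existing kill flag is cleared instead.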
void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
  // Keep track of regunit ranges.
  SmallVector<std::pair<LiveInterval*, LiveInterval::iterator>, 8> RU;

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (MRI->reg_nodbg_empty(Reg))
      continue;
    LiveInterval *LI = &getInterval(Reg);
    if (LI->empty())
      continue;

    // Find the regunit intervals for the assigned register. They may overlap
    // the virtual register live range, cancelling any kills.
    RU.clear();
    for (MCRegUnitIterator Units(VRM->getPhys(Reg), TRI); Units.isValid();
         ++Units) {
      LiveInterval *RUInt = &getRegUnit(*Units);
      if (RUInt->empty())
        continue;
      RU.push_back(std::make_pair(RUInt, RUInt->find(LI->begin()->end)));
    }

    // Every instruction that kills Reg corresponds to a live range end point.
    for (LiveInterval::iterator RI = LI->begin(), RE = LI->end(); RI != RE;
         ++RI) {
      // A block index indicates an MBB edge.
      if (RI->end.isBlock())
        continue;
      MachineInstr *MI = getInstructionFromIndex(RI->end);
      if (!MI)
        continue;

      // Check if any of the regunits are live beyond the end of RI. That could
      // happen when a physreg is defined as a copy of a virtreg:
      //
      //   %EAX = COPY %vreg5
      //   FOO %vreg5         <--- MI, cancel kill because %EAX is live.
      //   BAR %EAX<kill>
      //
      // There should be no kill flag on FOO when %vreg5 is rewritten as %EAX.
      bool CancelKill = false;
      for (unsigned u = 0, e = RU.size(); u != e; ++u) {
        LiveInterval *RInt = RU[u].first;
        LiveInterval::iterator &I = RU[u].second;
        if (I == RInt->end())
          continue;
        I = RInt->advanceTo(I, RI->end);
        if (I == RInt->end() || I->start >= RI->end)
          continue;
        // I is overlapping RI.
        CancelKill = true;
        break;
      }
      if (CancelKill)
        MI->clearRegisterKills(Reg, NULL);
      else
        MI->addRegisterKilled(Reg, NULL);
    }
  }
}

MachineBasicBlock*
LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
  // A local live range must be fully contained inside the block, meaning it is
  // defined and killed at instructions, not at block boundaries. It is not
  // live in or out of any block.
  //
  // It is technically possible to have a PHI-defined live range identical to a
  // single block, but we are going to return false in that case.

  SlotIndex Start = LI.beginIndex();
  if (Start.isBlock())
    return NULL;

  SlotIndex Stop = LI.endIndex();
  if (Stop.isBlock())
    return NULL;

  // getMBBFromIndex doesn't need to search the MBB table when both indexes
  // belong to proper instructions.
  MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
  MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
  return MBB1 == MBB2 ? MBB1 : NULL;
}

bool
LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
  for (LiveInterval::const_vni_iterator I = LI.vni_begin(), E = LI.vni_end();
       I != E; ++I) {
    const VNInfo *PHI = *I;
    if (PHI->isUnused() || !PHI->isPHIDef())
      continue;
    const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
    // Conservatively return true instead of scanning huge predecessor lists.
    if (PHIMBB->pred_size() > 100)
      return true;
    for (MachineBasicBlock::const_pred_iterator
         PI = PHIMBB->pred_begin(), PE = PHIMBB->pred_end(); PI != PE; ++PI)
      if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(*PI)))
        return true;
  }
  return false;
}

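/// getSpillWeight - Return the spill weight contribution of a single use or
/// def at the given block frequency, normalized so that the entry block has
/// a weight of one.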
float
LiveIntervals::getSpillWeight(bool isDef, bool isUse, BlockFrequency freq) {
  const float Scale = 1.0f / BlockFrequency::getEntryFrequency();
  return (isDef + isUse) * (freq.getFrequency() * Scale);
}

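/// addLiveRangeToEndOfBlock - Add a live range for reg that starts at
/// startInst's register slot and extends to the end of its basic block,
/// defined by a freshly created value number.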
LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
                                                  MachineInstr* startInst) {
  LiveInterval& Interval = getOrCreateInterval(reg);
  VNInfo* VN = Interval.getNextValue(
    SlotIndex(getInstructionIndex(startInst).getRegSlot()),
    getVNInfoAllocator());
  LiveRange LR(
     SlotIndex(getInstructionIndex(startInst).getRegSlot()),
     getMBBEndIdx(startInst->getParent()), VN);
  Interval.addRange(LR);

  return LR;
}


//===----------------------------------------------------------------------===//
//                          Register mask functions
//===----------------------------------------------------------------------===//

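/// checkRegMaskInterference - Check whether LI overlaps any regmask slots.
/// If it does, set UsableRegs to the registers preserved by every overlapping
/// mask and return true. Otherwise return false and leave UsableRegs alone.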
bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();

  // Use smaller arrays for local live ranges.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }

  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI =
    std::lower_bound(Slots.begin(), Slots.end(), LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();

  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;

  bool Found = false;
  for (;;) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      if (!Found) {
        // This is the first overlap. Initialize UsableRegs to all ones.
        UsableRegs.clear();
        UsableRegs.resize(TRI->getNumRegs(), true);
        Found = true;
      }
      // Remove usable registers clobbered by this mask.
      UsableRegs.clearBitsNotInMask(Bits[SlotI-Slots.begin()]);
      if (++SlotI == SlotE)
        return Found;
    }
    // *SlotI is beyond the current LI segment.
    LiveI = LI.advanceTo(LiveI, *SlotI);
    if (LiveI == LiveE)
      return Found;
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}

//===----------------------------------------------------------------------===//
//                         HMEditor class.
//===----------------------------------------------------------------------===//

// HMEditor is a toolkit used by handleMove to trim or extend live intervals.
class LiveIntervals::HMEditor {
private:
  LiveIntervals& LIS;
  const MachineRegisterInfo& MRI;
  const TargetRegisterInfo& TRI;
  SlotIndex OldIdx;
  SlotIndex NewIdx;
  SmallPtrSet<LiveInterval*, 8> Updated;
  bool UpdateFlags;

public:
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI,
           SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
    : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
      UpdateFlags(UpdateFlags) {}

  // FIXME: UpdateFlags is a workaround that creates live intervals for all
  // physregs, even those that aren't needed for regalloc, in order to update
  // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
  // flags, and postRA passes will use a live register utility instead.
  LiveInterval *getRegUnitLI(unsigned Unit) {
    if (UpdateFlags)
      return &LIS.getRegUnit(Unit);
    return LIS.getCachedRegUnit(Unit);
  }

  /// Update all live ranges touched by MI, assuming a move from OldIdx to
  /// NewIdx.
  void updateAllRanges(MachineInstr *MI) {
    DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": " << *MI);
    bool hasRegMask = false;
    for (MIOperands MO(MI); MO.isValid(); ++MO) {
      if (MO->isRegMask())
        hasRegMask = true;
      if (!MO->isReg())
        continue;
      // Aggressively clear all kill flags.
      // They are reinserted by VirtRegRewriter.
      if (MO->isUse())
        MO->setIsKill(false);

      unsigned Reg = MO->getReg();
      if (!Reg)
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        updateRange(LIS.getInterval(Reg));
        continue;
      }

      // For physregs, only update the regunits that actually have a
      // precomputed live range.
      for (MCRegUnitIterator Units(Reg, &TRI); Units.isValid(); ++Units)
        if (LiveInterval *LI = getRegUnitLI(*Units))
          updateRange(*LI);
    }
    if (hasRegMask)
      updateRegMaskSlots();
  }

private:
  /// Update a single live range, assuming an instruction has been moved from
  /// OldIdx to NewIdx.
  void updateRange(LiveInterval &LI) {
    if (!Updated.insert(&LI))
      return;
    DEBUG({
      dbgs() << "     ";
      if (TargetRegisterInfo::isVirtualRegister(LI.reg))
        dbgs() << PrintReg(LI.reg);
      else
        dbgs() << PrintRegUnit(LI.reg, &TRI);
      dbgs() << ":\t" << LI << '\n';
    });
    if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
      handleMoveDown(LI);
    else
      handleMoveUp(LI);
    DEBUG(dbgs() << "        -->\t" << LI << '\n');
    LI.verify();
  }

  /// Update LI to reflect an instruction has been moved downwards from OldIdx
  /// to NewIdx.
  ///
  /// 1. Live def at OldIdx:
  ///    Move def to NewIdx, assert endpoint after NewIdx.
  ///
  /// 2. Live def at OldIdx, killed at NewIdx:
  ///    Change to dead def at NewIdx.
  ///    (Happens when bundling def+kill together).
  ///
  /// 3. Dead def at OldIdx:
  ///    Move def to NewIdx, possibly across another live value.
  ///
  /// 4. Def at OldIdx AND at NewIdx:
  ///    Remove live range [OldIdx;NewIdx) and value defined at OldIdx.
  ///    (Happens when bundling multiple defs together).
  ///
  /// 5. Value read at OldIdx, killed before NewIdx:
  ///    Extend kill to NewIdx.
  ///
  void handleMoveDown(LiveInterval &LI) {
    // First look for a kill at OldIdx.
    LiveInterval::iterator I = LI.find(OldIdx.getBaseIndex());
    LiveInterval::iterator E = LI.end();
    // Is LI even live at OldIdx?
    if (I == E || SlotIndex::isEarlierInstr(OldIdx, I->start))
      return;

    // Handle a live-in value.
    if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
      bool isKill = SlotIndex::isSameInstr(OldIdx, I->end);
      // If the live-in value already extends to NewIdx, there is nothing to do.
      if (!SlotIndex::isEarlierInstr(I->end, NewIdx))
        return;
      // Aggressively remove all kill flags from the old kill point.
      // Kill flags shouldn't be used while live intervals exist, they will be
      // reinserted by VirtRegRewriter.
      if (MachineInstr *KillMI = LIS.getInstructionFromIndex(I->end))
        for (MIBundleOperands MO(KillMI); MO.isValid(); ++MO)
          if (MO->isReg() && MO->isUse())
            MO->setIsKill(false);
      // Adjust I->end to reach NewIdx. This may temporarily make LI invalid by
      // overlapping ranges. Case 5 above.
      I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
      // If this was a kill, there may also be a def. Otherwise we're done.
      if (!isKill)
        return;
      ++I;
    }

    // Check for a def at OldIdx.
    if (I == E || !SlotIndex::isSameInstr(OldIdx, I->start))
      return;
    // We have a def at OldIdx.
    VNInfo *DefVNI = I->valno;
    assert(DefVNI->def == I->start && "Inconsistent def");
    DefVNI->def = NewIdx.getRegSlot(I->start.isEarlyClobber());
    // If the defined value extends beyond NewIdx, just move the def down.
    // This is case 1 above.
    if (SlotIndex::isEarlierInstr(NewIdx, I->end)) {
      I->start = DefVNI->def;
      return;
    }
    // The remaining possibilities are now:
    // 2. Live def at OldIdx, killed at NewIdx: isSameInstr(I->end, NewIdx).
    // 3. Dead def at OldIdx: I->end = OldIdx.getDeadSlot().
    // In either case, it is possible that there is an existing def at NewIdx.
    assert((I->end == OldIdx.getDeadSlot() ||
            SlotIndex::isSameInstr(I->end, NewIdx)) &&
            "Cannot move def below kill");
    LiveInterval::iterator NewI = LI.advanceTo(I, NewIdx.getRegSlot());
    if (NewI != E && SlotIndex::isSameInstr(NewI->start, NewIdx)) {
      // There is an existing def at NewIdx, case 4 above. The def at OldIdx is
      // coalesced into that value.
      assert(NewI->valno != DefVNI && "Multiple defs of value?");
      LI.removeValNo(DefVNI);
      return;
    }
    // There was no existing def at NewIdx. Turn *I into a dead def at NewIdx.
    // If the def at OldIdx was dead, we allow it to be moved across other LI
    // values. The new range should be placed immediately before NewI, move any
    // intermediate ranges up.
    assert(NewI != I && "Inconsistent iterators");
    std::copy(llvm::next(I), NewI, I);
    *llvm::prior(NewI) = LiveRange(DefVNI->def, NewIdx.getDeadSlot(), DefVNI);
  }

  /// Update LI to reflect an instruction has been moved upwards from OldIdx
  /// to NewIdx.
  ///
  /// 1. Live def at OldIdx:
  ///    Hoist def to NewIdx.
  ///
  /// 2. Dead def at OldIdx:
  ///    Hoist def+end to NewIdx, possibly move across other values.
  ///
  /// 3. Dead def at OldIdx AND existing def at NewIdx:
  ///    Remove value defined at OldIdx, coalescing it with existing value.
  ///
  /// 4. Live def at OldIdx AND existing def at NewIdx:
  ///    Remove value defined at NewIdx, hoist OldIdx def to NewIdx.
  ///    (Happens when bundling multiple defs together).
  ///
  /// 5. Value killed at OldIdx:
  ///    Hoist kill to NewIdx, then scan for last kill between NewIdx and
  ///    OldIdx.
  ///
  void handleMoveUp(LiveInterval &LI) {
    // First look for a kill at OldIdx.
    LiveInterval::iterator I = LI.find(OldIdx.getBaseIndex());
    LiveInterval::iterator E = LI.end();
    // Is LI even live at OldIdx?
    if (I == E || SlotIndex::isEarlierInstr(OldIdx, I->start))
      return;

    // Handle a live-in value.
    if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
      // If the live-in value isn't killed here, there is nothing to do.
      if (!SlotIndex::isSameInstr(OldIdx, I->end))
        return;
      // Adjust I->end to end at NewIdx. If we are hoisting a kill above
      // another use, we need to search for that use. Case 5 above.
      I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
      ++I;
      // If OldIdx also defines a value, there couldn't have been another use.
      if (I == E || !SlotIndex::isSameInstr(I->start, OldIdx)) {
        // No def, search for the new kill.
        // This can never be an early clobber kill since there is no def.
        llvm::prior(I)->end = findLastUseBefore(LI.reg).getRegSlot();
        return;
      }
    }

    // Now deal with the def at OldIdx.
    assert(I != E && SlotIndex::isSameInstr(I->start, OldIdx) && "No def?");
    VNInfo *DefVNI = I->valno;
    assert(DefVNI->def == I->start && "Inconsistent def");
    DefVNI->def = NewIdx.getRegSlot(I->start.isEarlyClobber());

    // Check for an existing def at NewIdx.
    LiveInterval::iterator NewI = LI.find(NewIdx.getRegSlot());
    if (SlotIndex::isSameInstr(NewI->start, NewIdx)) {
      assert(NewI->valno != DefVNI && "Same value defined more than once?");
      // There is an existing def at NewIdx.
      if (I->end.isDead()) {
        // Case 3: Remove the dead def at OldIdx.
        LI.removeValNo(DefVNI);
        return;
      }
      // Case 4: Replace def at NewIdx with live def at OldIdx.
      I->start = DefVNI->def;
      LI.removeValNo(NewI->valno);
      return;
    }

    // There is no existing def at NewIdx. Hoist DefVNI.
    if (!I->end.isDead()) {
      // Leave the end point of a live def.
      I->start = DefVNI->def;
      return;
    }

    // DefVNI is a dead def. It may have been moved across other values in LI,
    // so move I up to NewI. Slide [NewI;I) down one position.
    std::copy_backward(NewI, I, llvm::next(I));
    *NewI = LiveRange(DefVNI->def, NewIdx.getDeadSlot(), DefVNI);
  }

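  // The instruction at OldIdx carries a regmask and has moved to NewIdx.
  // Update the corresponding entry in LIS.RegMaskSlots; the slot array must
  // stay sorted, which the asserts below verify.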
  void updateRegMaskSlots() {
    SmallVectorImpl<SlotIndex>::iterator RI =
      std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
                       OldIdx);
    assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
           "No RegMask at OldIdx.");
    *RI = NewIdx.getRegSlot();
    assert((RI == LIS.RegMaskSlots.begin() ||
            SlotIndex::isEarlierInstr(*llvm::prior(RI), *RI)) &&
            "Cannot move regmask instruction above another call");
    assert((llvm::next(RI) == LIS.RegMaskSlots.end() ||
            SlotIndex::isEarlierInstr(*RI, *llvm::next(RI))) &&
            "Cannot move regmask instruction below another call");
  }

  // Return the last use of reg between NewIdx and OldIdx.
  SlotIndex findLastUseBefore(unsigned Reg) {

    if (TargetRegisterInfo::isVirtualRegister(Reg)) {
      SlotIndex LastUse = NewIdx;
      for (MachineRegisterInfo::use_nodbg_iterator
             UI = MRI.use_nodbg_begin(Reg),
             UE = MRI.use_nodbg_end();
           UI != UE; UI.skipInstruction()) {
        const MachineInstr* MI = &*UI;
        SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
        if (InstSlot > LastUse && InstSlot < OldIdx)
          LastUse = InstSlot;
      }
      return LastUse;
    }

    // This is a regunit interval, so scanning the use list could be very
    // expensive. Scan upwards from OldIdx instead.
    assert(NewIdx < OldIdx && "Expected upwards move");
    SlotIndexes *Indexes = LIS.getSlotIndexes();
    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(NewIdx);

    // OldIdx may not correspond to an instruction any longer, so set MII to
    // point to the next instruction after OldIdx, or MBB->end().
    MachineBasicBlock::iterator MII = MBB->end();
    if (MachineInstr *MI = Indexes->getInstructionFromIndex(
                           Indexes->getNextNonNullIndex(OldIdx)))
      if (MI->getParent() == MBB)
        MII = MI;

    MachineBasicBlock::iterator Begin = MBB->begin();
    while (MII != Begin) {
      if ((--MII)->isDebugValue())
        continue;
      SlotIndex Idx = Indexes->getInstructionIndex(MII);

      // Stop searching when NewIdx is reached.
      if (!SlotIndex::isEarlierInstr(NewIdx, Idx))
        return NewIdx;

      // Check if MII uses Reg.
      for (MIBundleOperands MO(MII); MO.isValid(); ++MO)
        if (MO->isReg() &&
            TargetRegisterInfo::isPhysicalRegister(MO->getReg()) &&
            TRI.hasRegUnit(MO->getReg(), Reg))
          return Idx;
    }
    // Didn't reach NewIdx. It must be the first instruction in the block.
    return NewIdx;
  }
};

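/// handleMove - Update all live intervals after MI has been moved within its
/// basic block. MI is assigned a fresh slot index and HMEditor repairs every
/// live range it touches.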
void LiveIntervals::handleMove(MachineInstr* MI, bool UpdateFlags) {
  assert(!MI->isBundled() && "Can't handle bundled instructions yet.");
  SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  Indexes->removeMachineInstrFromMaps(MI);
  SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
  assert(getMBBStartIdx(MI->getParent()) <= OldIndex &&
         OldIndex < getMBBEndIdx(MI->getParent()) &&
         "Cannot handle moves across basic block boundaries.");

  HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  HME.updateAllRanges(MI);
}

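/// handleMoveIntoBundle - MI has been moved into the bundle headed by
/// BundleStart. Update the affected live ranges as if MI had moved from its
/// old slot index to the bundle's index.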
void LiveIntervals::handleMoveIntoBundle(MachineInstr* MI,
                                         MachineInstr* BundleStart,
                                         bool UpdateFlags) {
  SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  SlotIndex NewIndex = Indexes->getInstructionIndex(BundleStart);
  HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  HME.updateAllRanges(MI);
}

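/// repairIntervalsInRange - Repair the live intervals of the registers in
/// OrigRegs after the instructions in [Begin, End) within MBB have been
/// modified. Virtual registers used in the range that have no interval yet
/// get one computed from scratch; existing intervals have their segment
/// boundaries fixed up by scanning the range backwards.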
void
LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator Begin,
                                      MachineBasicBlock::iterator End,
                                      ArrayRef<unsigned> OrigRegs) {
  // Find anchor points, which are at the beginning/end of blocks or at
  // instructions that already have indexes.
  while (Begin != MBB->begin() && !Indexes->hasIndex(Begin))
    --Begin;
  while (End != MBB->end() && !Indexes->hasIndex(End))
    ++End;

  SlotIndex endIdx;
  if (End == MBB->end())
    endIdx = getMBBEndIdx(MBB).getPrevSlot();
  else
    endIdx = getInstructionIndex(End);

  Indexes->repairIndexesInRange(MBB, Begin, End);

  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr *MI = I;
    if (MI->isDebugValue())
      continue;
    for (MachineInstr::const_mop_iterator MOI = MI->operands_begin(),
         MOE = MI->operands_end(); MOI != MOE; ++MOI) {
      if (MOI->isReg() &&
          TargetRegisterInfo::isVirtualRegister(MOI->getReg()) &&
          !hasInterval(MOI->getReg())) {
        LiveInterval &LI = getOrCreateInterval(MOI->getReg());
        computeVirtRegInterval(&LI);
      }
    }
  }

  for (unsigned i = 0, e = OrigRegs.size(); i != e; ++i) {
    unsigned Reg = OrigRegs[i];
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;

    LiveInterval &LI = getInterval(Reg);
    // FIXME: Should we support undefs that gain defs?
    if (!LI.hasAtLeastOneValue())
      continue;

    LiveInterval::iterator LII = LI.find(endIdx);
    SlotIndex lastUseIdx;
    if (LII != LI.end() && LII->start < endIdx)
      lastUseIdx = LII->end;
    else
      --LII;

    for (MachineBasicBlock::iterator I = End; I != Begin;) {
      --I;
      MachineInstr *MI = I;
      if (MI->isDebugValue())
        continue;

      SlotIndex instrIdx = getInstructionIndex(MI);
      bool isStartValid = getInstructionFromIndex(LII->start);
      bool isEndValid = getInstructionFromIndex(LII->end);

      // FIXME: This doesn't currently handle early-clobber or multiple removed
      // defs inside of the region to repair.
      for (MachineInstr::mop_iterator OI = MI->operands_begin(),
           OE = MI->operands_end(); OI != OE; ++OI) {
        const MachineOperand &MO = *OI;
        if (!MO.isReg() || MO.getReg() != Reg)
          continue;

        if (MO.isDef()) {
          if (!isStartValid) {
            if (LII->end.isDead()) {
              SlotIndex prevStart;
              if (LII != LI.begin())
                prevStart = llvm::prior(LII)->start;

              // FIXME: This could be more efficient if there was a removeRange
              // method that returned an iterator.
              LI.removeRange(*LII, true);
              if (prevStart.isValid())
                LII = LI.find(prevStart);
              else
                LII = LI.begin();
            } else {
              LII->start = instrIdx.getRegSlot();
              LII->valno->def = instrIdx.getRegSlot();
              if (MO.getSubReg() && !MO.isUndef())
                lastUseIdx = instrIdx.getRegSlot();
              else
                lastUseIdx = SlotIndex();
              continue;
            }
          }

          if (!lastUseIdx.isValid()) {
            VNInfo *VNI = LI.getNextValue(instrIdx.getRegSlot(),
                                          VNInfoAllocator);
            LiveRange LR(instrIdx.getRegSlot(), instrIdx.getDeadSlot(), VNI);
            LII = LI.addRange(LR);
          } else if (LII->start != instrIdx.getRegSlot()) {
            VNInfo *VNI = LI.getNextValue(instrIdx.getRegSlot(),
                                          VNInfoAllocator);
            LiveRange LR(instrIdx.getRegSlot(), lastUseIdx, VNI);
            LII = LI.addRange(LR);
          }

          if (MO.getSubReg() && !MO.isUndef())
            lastUseIdx = instrIdx.getRegSlot();
          else
            lastUseIdx = SlotIndex();
        } else if (MO.isUse()) {
          // FIXME: This should probably be handled outside of this branch,
          // either as part of the def case (for defs inside of the region) or
          // after the loop over the region.
          if (!isEndValid && !LII->end.isBlock())
            LII->end = instrIdx.getRegSlot();
          if (!lastUseIdx.isValid())
            lastUseIdx = instrIdx.getRegSlot();
        }
      }
    }
  }
}