VirtRegMap.cpp revision a1c1e78ab8e0bf30d31fd76e4088af2b8638fe90
//===-- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the VirtRegMap class.
//
// It also contains implementations of the Spiller interface, which, given a
// virtual register map and a machine function, eliminates all virtual
// references by replacing them with physical register references - adding spill
// code as necessary.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "spiller"
#include "VirtRegMap.h"
#include "llvm/Function.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/SSARegMap.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Compiler.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumSpills, "Number of register spills");
STATISTIC(NumReMats, "Number of re-materializations");
STATISTIC(NumDRM   , "Number of re-materializable defs elided");
STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumReused, "Number of values reused");
STATISTIC(NumDSE   , "Number of dead stores elided");
STATISTIC(NumDCE   , "Number of copies elided");

namespace {
  enum SpillerName { simple, local };

  static cl::opt<SpillerName>
  SpillerOpt("spiller",
             cl::desc("Spiller to use: (default: local)"),
             cl::Prefix,
             cl::values(clEnumVal(simple, "  simple spiller"),
                        clEnumVal(local,  "  local spiller"),
                        clEnumValEnd),
             cl::init(local));
}
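
// The spiller is selected on the command line, e.g. "-spiller=simple" when
// running llc (assuming the usual cl::opt "-option=value" syntax); the local
// spiller is the default.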

//===----------------------------------------------------------------------===//
//  VirtRegMap implementation
//===----------------------------------------------------------------------===//

VirtRegMap::VirtRegMap(MachineFunction &mf)
  : TII(*mf.getTarget().getInstrInfo()), MF(mf),
    Virt2PhysMap(NO_PHYS_REG), Virt2StackSlotMap(NO_STACK_SLOT),
    Virt2ReMatIdMap(NO_STACK_SLOT), ReMatMap(NULL),
    ReMatId(MAX_STACK_SLOT+1) {
  grow();
}

void VirtRegMap::grow() {
  unsigned LastVirtReg = MF.getSSARegMap()->getLastVirtReg();
  Virt2PhysMap.grow(LastVirtReg);
  Virt2StackSlotMap.grow(LastVirtReg);
  Virt2ReMatIdMap.grow(LastVirtReg);
  ReMatMap.grow(LastVirtReg);
}

int VirtRegMap::assignVirt2StackSlot(unsigned virtReg) {
  assert(MRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF.getSSARegMap()->getRegClass(virtReg);
  int frameIndex = MF.getFrameInfo()->CreateStackObject(RC->getSize(),
                                                        RC->getAlignment());
  Virt2StackSlotMap[virtReg] = frameIndex;
  ++NumSpills;
  return frameIndex;
}

void VirtRegMap::assignVirt2StackSlot(unsigned virtReg, int frameIndex) {
  assert(MRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((frameIndex >= 0 ||
          (frameIndex >= MF.getFrameInfo()->getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg] = frameIndex;
}

int VirtRegMap::assignVirtReMatId(unsigned virtReg) {
  assert(MRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2ReMatIdMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign re-mat id to already spilled register");
  Virt2ReMatIdMap[virtReg] = ReMatId;
  return ReMatId++;
}
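
// Note that re-mat ids share the stack slot number space: they start at
// MAX_STACK_SLOT+1, so a single int can name either a real spill slot or a
// re-mat value.  For example, the first id handed out is MAX_STACK_SLOT+1,
// which the debug output below prints as "RM#0".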

void VirtRegMap::assignVirtReMatId(unsigned virtReg, int id) {
  assert(MRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2ReMatIdMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign re-mat id to already spilled register");
  Virt2ReMatIdMap[virtReg] = id;
}
void VirtRegMap::virtFolded(unsigned VirtReg, MachineInstr *OldMI,
                            unsigned OpNo, MachineInstr *NewMI) {
  // Move any memory references previously folded into OldMI over to NewMI.
  MI2VirtMapTy::iterator IP = MI2VirtMap.lower_bound(NewMI);
  for (MI2VirtMapTy::iterator I = MI2VirtMap.lower_bound(OldMI),
         E = MI2VirtMap.end(); I != E && I->first == OldMI; ) {
    MI2VirtMap.insert(IP, std::make_pair(NewMI, I->second));
    MI2VirtMap.erase(I++);
  }

  ModRef MRInfo;
  const TargetInstrDescriptor *TID = OldMI->getInstrDescriptor();
  if (TID->getOperandConstraint(OpNo, TOI::TIED_TO) != -1 ||
      TID->findTiedToSrcOperand(OpNo) != -1) {
    // Folded a two-address operand.
    MRInfo = isModRef;
  } else if (OldMI->getOperand(OpNo).isDef()) {
    MRInfo = isMod;
  } else {
    MRInfo = isRef;
  }

  // Add the new memory reference.
  MI2VirtMap.insert(IP, std::make_pair(NewMI, std::make_pair(VirtReg, MRInfo)));
}
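
// For example, folding a plain use of a spilled vreg records an isRef entry,
// folding a plain def records isMod, and folding the tied use&def operand of
// a two-address instruction records isModRef.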

void VirtRegMap::print(std::ostream &OS) const {
  const MRegisterInfo* MRI = MF.getTarget().getRegisterInfo();

  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = MRegisterInfo::FirstVirtualRegister,
         e = MF.getSSARegMap()->getLastVirtReg(); i <= e; ++i) {
    if (Virt2PhysMap[i] != (unsigned)VirtRegMap::NO_PHYS_REG)
      OS << "[reg" << i << " -> " << MRI->getName(Virt2PhysMap[i]) << "]\n";
  }

  for (unsigned i = MRegisterInfo::FirstVirtualRegister,
         e = MF.getSSARegMap()->getLastVirtReg(); i <= e; ++i)
    if (Virt2StackSlotMap[i] != VirtRegMap::NO_STACK_SLOT)
      OS << "[reg" << i << " -> fi#" << Virt2StackSlotMap[i] << "]\n";
  OS << '\n';
}

void VirtRegMap::dump() const {
  print(DOUT);
}


//===----------------------------------------------------------------------===//
// Simple Spiller Implementation
//===----------------------------------------------------------------------===//

Spiller::~Spiller() {}

namespace {
  struct VISIBILITY_HIDDEN SimpleSpiller : public Spiller {
    bool runOnMachineFunction(MachineFunction& mf, VirtRegMap &VRM);
  };
}

bool SimpleSpiller::runOnMachineFunction(MachineFunction &MF, VirtRegMap &VRM) {
  DOUT << "********** REWRITE MACHINE CODE **********\n";
  DOUT << "********** Function: " << MF.getFunction()->getName() << '\n';
  const TargetMachine &TM = MF.getTarget();
  const MRegisterInfo &MRI = *TM.getRegisterInfo();

  // LoadedRegs - Keep track of which vregs are loaded, so that we only load
  // each vreg once (in the case where a spilled vreg is used by multiple
  // operands).  This is always smaller than the number of operands to the
  // current machine instr, so it should be small.
  std::vector<unsigned> LoadedRegs;

  for (MachineFunction::iterator MBBI = MF.begin(), E = MF.end();
       MBBI != E; ++MBBI) {
    DOUT << MBBI->getBasicBlock()->getName() << ":\n";
    MachineBasicBlock &MBB = *MBBI;
    for (MachineBasicBlock::iterator MII = MBB.begin(),
           E = MBB.end(); MII != E; ++MII) {
      MachineInstr &MI = *MII;
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI.getOperand(i);
        if (MO.isRegister() && MO.getReg())
          if (MRegisterInfo::isVirtualRegister(MO.getReg())) {
            unsigned VirtReg = MO.getReg();
            unsigned PhysReg = VRM.getPhys(VirtReg);
            if (!VRM.isAssignedReg(VirtReg)) {
              int StackSlot = VRM.getStackSlot(VirtReg);
              const TargetRegisterClass* RC =
                MF.getSSARegMap()->getRegClass(VirtReg);

              if (MO.isUse() &&
                  std::find(LoadedRegs.begin(), LoadedRegs.end(), VirtReg)
                  == LoadedRegs.end()) {
                MRI.loadRegFromStackSlot(MBB, &MI, PhysReg, StackSlot, RC);
                LoadedRegs.push_back(VirtReg);
                ++NumLoads;
                DOUT << '\t' << *prior(MII);
              }

              if (MO.isDef()) {
                MRI.storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
                ++NumStores;
              }
            }
            MF.setPhysRegUsed(PhysReg);
            MI.getOperand(i).setReg(PhysReg);
          } else {
            MF.setPhysRegUsed(MO.getReg());
          }
      }

      DOUT << '\t' << MI;
      LoadedRegs.clear();
    }
  }
  return true;
}

//===----------------------------------------------------------------------===//
//  Local Spiller Implementation
//===----------------------------------------------------------------------===//

namespace {
  /// LocalSpiller - This spiller does a simple pass over the machine basic
  /// block to attempt to keep spills in registers as much as possible for
  /// blocks that have low register pressure (the vreg may be spilled due to
  /// register pressure in other blocks).
  class VISIBILITY_HIDDEN LocalSpiller : public Spiller {
    const MRegisterInfo *MRI;
    const TargetInstrInfo *TII;
  public:
    bool runOnMachineFunction(MachineFunction &MF, VirtRegMap &VRM) {
      MRI = MF.getTarget().getRegisterInfo();
      TII = MF.getTarget().getInstrInfo();
      DOUT << "\n**** Local spiller rewriting function '"
           << MF.getFunction()->getName() << "':\n";
      DOUT << "**** Machine Instrs (NOTE! Does not include spills and reloads!) ****\n";
      DEBUG(MF.dump());

      for (MachineFunction::iterator MBB = MF.begin(), E = MF.end();
           MBB != E; ++MBB)
        RewriteMBB(*MBB, VRM);

      DOUT << "**** Post Machine Instrs ****\n";
      DEBUG(MF.dump());

      return true;
    }
  private:
    void RewriteMBB(MachineBasicBlock &MBB, VirtRegMap &VRM);
  };
}

/// AvailableSpills - As the local spiller is scanning and rewriting an MBB from
/// top down, keep track of which spill slots or remat values are available in
/// each register.
///
/// Note that not all physregs are created equal here.  In particular, some
/// physregs are reloads that we are allowed to clobber or ignore at any time.
/// Other physregs are values that the register-allocated program is using that
/// we cannot CHANGE, but we can read if we like.  We keep track of this on a
/// per-stack-slot / remat id basis as the low bit in the value of the
/// SpillSlotsAvailable entries.  The predicate 'canClobberPhysReg()' checks
/// this bit and addAvailable sets it if the CanClobber argument is true.
namespace {
class VISIBILITY_HIDDEN AvailableSpills {
  const MRegisterInfo *MRI;
  const TargetInstrInfo *TII;

  // SpillSlotsOrReMatsAvailable - This map keeps track of all of the spilled
  // or remat'ed virtual register values that are still available, due to being
  // loaded or stored to, but not invalidated yet.
  std::map<int, unsigned> SpillSlotsOrReMatsAvailable;

  // PhysRegsAvailable - This is the inverse of SpillSlotsOrReMatsAvailable,
  // indicating which stack slot values are currently held by a physreg.  This
  // is used to invalidate entries in SpillSlotsOrReMatsAvailable when a
  // physreg is modified.
  std::multimap<unsigned, int> PhysRegsAvailable;

  void disallowClobberPhysRegOnly(unsigned PhysReg);

  void ClobberPhysRegOnly(unsigned PhysReg);
public:
  AvailableSpills(const MRegisterInfo *mri, const TargetInstrInfo *tii)
    : MRI(mri), TII(tii) {
  }

  const MRegisterInfo *getRegInfo() const { return MRI; }

  /// getSpillSlotOrReMatPhysReg - If the specified stack slot or remat is
  /// available in a physical register, return that PhysReg, otherwise
  /// return 0.
  unsigned getSpillSlotOrReMatPhysReg(int Slot) const {
    std::map<int, unsigned>::const_iterator I =
      SpillSlotsOrReMatsAvailable.find(Slot);
    if (I != SpillSlotsOrReMatsAvailable.end()) {
      return I->second >> 1;  // Remove the CanClobber bit.
    }
    return 0;
  }

  /// addAvailable - Mark that the specified stack slot / remat is available in
  /// the specified physreg.  If CanClobber is true, the physreg can be modified
  /// at any time without changing the semantics of the program.
  void addAvailable(int SlotOrReMat, MachineInstr *MI, unsigned Reg,
                    bool CanClobber = true) {
    // If this stack slot is thought to be available in some other physreg,
    // remove its record.
    ModifyStackSlotOrReMat(SlotOrReMat);

    PhysRegsAvailable.insert(std::make_pair(Reg, SlotOrReMat));
    SpillSlotsOrReMatsAvailable[SlotOrReMat]= (Reg << 1) | (unsigned)CanClobber;

    if (SlotOrReMat > VirtRegMap::MAX_STACK_SLOT)
      DOUT << "Remembering RM#" << SlotOrReMat-VirtRegMap::MAX_STACK_SLOT-1;
    else
      DOUT << "Remembering SS#" << SlotOrReMat;
    DOUT << " in physreg " << MRI->getName(Reg) << "\n";
  }
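
  // Illustrative encoding (numbers made up): if spill slot #3 is available in
  // physreg 5 and may be clobbered, the map entry is (5 << 1) | 1 == 11;
  // getSpillSlotOrReMatPhysReg(3) then returns 11 >> 1 == 5, and
  // canClobberPhysReg(3) tests the low bit.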

  /// canClobberPhysReg - Return true if the spiller is allowed to change the
  /// value of the specified stackslot register if it desires.  The specified
  /// stack slot must be available in a physreg for this query to make sense.
  bool canClobberPhysReg(int SlotOrReMat) const {
    assert(SpillSlotsOrReMatsAvailable.count(SlotOrReMat) &&
           "Value not available!");
    return SpillSlotsOrReMatsAvailable.find(SlotOrReMat)->second & 1;
  }

  /// disallowClobberPhysReg - Unset the CanClobber bit of the specified
  /// stackslot register. The register is still available but is no longer
  /// allowed to be modified.
  void disallowClobberPhysReg(unsigned PhysReg);

  /// ClobberPhysReg - This is called when the specified physreg changes
  /// value.  We use this to invalidate any info about stuff we think lives in
  /// it and any of its aliases.
  void ClobberPhysReg(unsigned PhysReg);

  /// ModifyStackSlotOrReMat - This method is called when the value in a stack
  /// slot changes.  This removes information about which register the previous
  /// value for this slot lives in (as the previous value is dead now).
  void ModifyStackSlotOrReMat(int SlotOrReMat);
};
}

/// disallowClobberPhysRegOnly - Unset the CanClobber bit of the specified
/// stackslot register. The register is still available but is no longer
/// allowed to be modified.
void AvailableSpills::disallowClobberPhysRegOnly(unsigned PhysReg) {
  std::multimap<unsigned, int>::iterator I =
    PhysRegsAvailable.lower_bound(PhysReg);
  while (I != PhysRegsAvailable.end() && I->first == PhysReg) {
    int SlotOrReMat = I->second;
    I++;
    assert((SpillSlotsOrReMatsAvailable[SlotOrReMat] >> 1) == PhysReg &&
           "Bidirectional map mismatch!");
    SpillSlotsOrReMatsAvailable[SlotOrReMat] &= ~1;
    DOUT << "PhysReg " << MRI->getName(PhysReg)
         << " copied, it is available for use but can no longer be modified\n";
  }
}

/// disallowClobberPhysReg - Unset the CanClobber bit of the specified
/// stackslot register and its aliases. The register and its aliases may
/// still be available but are no longer allowed to be modified.
void AvailableSpills::disallowClobberPhysReg(unsigned PhysReg) {
  for (const unsigned *AS = MRI->getAliasSet(PhysReg); *AS; ++AS)
    disallowClobberPhysRegOnly(*AS);
  disallowClobberPhysRegOnly(PhysReg);
}

/// ClobberPhysRegOnly - This is called when the specified physreg changes
/// value.  We use this to invalidate any info about stuff we think lives in it.
void AvailableSpills::ClobberPhysRegOnly(unsigned PhysReg) {
  std::multimap<unsigned, int>::iterator I =
    PhysRegsAvailable.lower_bound(PhysReg);
  while (I != PhysRegsAvailable.end() && I->first == PhysReg) {
    int SlotOrReMat = I->second;
    PhysRegsAvailable.erase(I++);
    assert((SpillSlotsOrReMatsAvailable[SlotOrReMat] >> 1) == PhysReg &&
           "Bidirectional map mismatch!");
    SpillSlotsOrReMatsAvailable.erase(SlotOrReMat);
    DOUT << "PhysReg " << MRI->getName(PhysReg)
         << " clobbered, invalidating ";
    if (SlotOrReMat > VirtRegMap::MAX_STACK_SLOT)
      DOUT << "RM#" << SlotOrReMat-VirtRegMap::MAX_STACK_SLOT-1 << "\n";
    else
      DOUT << "SS#" << SlotOrReMat << "\n";
  }
}

/// ClobberPhysReg - This is called when the specified physreg changes
/// value.  We use this to invalidate any info about stuff we think lives in
/// it and any of its aliases.
void AvailableSpills::ClobberPhysReg(unsigned PhysReg) {
  for (const unsigned *AS = MRI->getAliasSet(PhysReg); *AS; ++AS)
    ClobberPhysRegOnly(*AS);
  ClobberPhysRegOnly(PhysReg);
}

/// ModifyStackSlotOrReMat - This method is called when the value in a stack
/// slot changes.  This removes information about which register the previous
/// value for this slot lives in (as the previous value is dead now).
void AvailableSpills::ModifyStackSlotOrReMat(int SlotOrReMat) {
  std::map<int, unsigned>::iterator It =
    SpillSlotsOrReMatsAvailable.find(SlotOrReMat);
  if (It == SpillSlotsOrReMatsAvailable.end()) return;
  unsigned Reg = It->second >> 1;
  SpillSlotsOrReMatsAvailable.erase(It);

  // This register may hold the value of multiple stack slots; only remove
  // this stack slot from the set of values the register contains.
  std::multimap<unsigned, int>::iterator I = PhysRegsAvailable.lower_bound(Reg);
  for (; ; ++I) {
    assert(I != PhysRegsAvailable.end() && I->first == Reg &&
           "Map inverse broken!");
    if (I->second == SlotOrReMat) break;
  }
  PhysRegsAvailable.erase(I);
}
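
// Example (register names illustrative): after "store R0 -> SS#1" and
// "store R0 -> SS#2", PhysRegsAvailable maps R0 to both slots.  Modifying
// SS#1 erases only the (R0, SS#1) pair; SS#2 remains available in R0.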


/// InvalidateKills - MI is going to be deleted. If any of its operands are
/// marked kill, then invalidate the information.
static void InvalidateKills(MachineInstr &MI, BitVector &RegKills,
                            std::vector<MachineOperand*> &KillOps,
                            SmallVector<unsigned, 1> *KillRegs = NULL) {
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isUse() || !MO.isKill())
      continue;
    unsigned Reg = MO.getReg();
    if (KillRegs)
      KillRegs->push_back(Reg);
    if (KillOps[Reg] == &MO) {
      RegKills.reset(Reg);
      KillOps[Reg] = NULL;
    }
  }
}

/// InvalidateRegDef - If the def operand of the specified def MI is now dead
/// (since its spill instruction has been removed), mark it isDead.  Also check
/// whether the def MI has other definition operands that are not dead; the
/// result is returned by reference in HasLiveDef.
static bool InvalidateRegDef(MachineBasicBlock::iterator I,
                             MachineInstr &NewDef, unsigned Reg,
                             bool &HasLiveDef) {
  // Due to remat, it's possible this reg isn't being reused. That is,
  // the def of this reg (by prev MI) is now dead.
  MachineInstr *DefMI = I;
  MachineOperand *DefOp = NULL;
  for (unsigned i = 0, e = DefMI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = DefMI->getOperand(i);
    if (MO.isReg() && MO.isDef()) {
      if (MO.getReg() == Reg)
        DefOp = &MO;
      else if (!MO.isDead())
        HasLiveDef = true;
    }
  }
  if (!DefOp)
    return false;

  bool FoundUse = false, Done = false;
  MachineBasicBlock::iterator E = NewDef;
  ++I; ++E;
  for (; !Done && I != E; ++I) {
    MachineInstr *NMI = I;
    for (unsigned j = 0, ee = NMI->getNumOperands(); j != ee; ++j) {
      MachineOperand &MO = NMI->getOperand(j);
      if (!MO.isReg() || MO.getReg() != Reg)
        continue;
      if (MO.isUse())
        FoundUse = true;
      Done = true; // Stop after scanning all the operands of this MI.
    }
  }
  if (!FoundUse) {
    // Def is dead!
    DefOp->setIsDead();
    return true;
  }
  return false;
}
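
// Example (names illustrative): if the dead store "store R0 -> SS#1" is
// erased and the preceding "R0 = ..." has no other use of R0 before NewDef,
// that def operand is marked isDead; HasLiveDef reports whether the same MI
// still defines some other live register.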

/// UpdateKills - Track and update kill info. If an MI reads a register that is
/// marked kill, then it must be due to register reuse. Transfer the kill info
/// over.
static void UpdateKills(MachineInstr &MI, BitVector &RegKills,
                        std::vector<MachineOperand*> &KillOps) {
  const TargetInstrDescriptor *TID = MI.getInstrDescriptor();
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isUse())
      continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0)
      continue;

    if (RegKills[Reg]) {
      // That can't be right. Register is killed but not re-defined and it's
      // being reused. Let's fix that.
      KillOps[Reg]->unsetIsKill();
      if (i < TID->numOperands &&
          TID->getOperandConstraint(i, TOI::TIED_TO) == -1)
        // Unless it's a two-address operand, this is the new kill.
        MO.setIsKill();
    }

    if (MO.isKill()) {
      RegKills.set(Reg);
      KillOps[Reg] = &MO;
    }
  }

  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef())
      continue;
    unsigned Reg = MO.getReg();
    RegKills.reset(Reg);
    KillOps[Reg] = NULL;
  }
}
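
// Example (names illustrative): if an earlier instruction held the kill of R0
// but this instruction reads R0 because a reload was avoided by reuse, the
// old kill marker is cleared and, for non-two-address operands, this use
// becomes the new kill of R0.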


// ReusedOp - For each reused operand, we keep track of a bit of information,
// in case we need to roll back upon processing a new operand.  See comments
// below.
namespace {
  struct ReusedOp {
    // Operand - The MachineInstr operand that reused an available value.
    unsigned Operand;

    // StackSlotOrReMat - The spill slot or remat id of the value being reused.
    unsigned StackSlotOrReMat;

    // PhysRegReused - The physical register the value was available in.
    unsigned PhysRegReused;

    // AssignedPhysReg - The physreg that was assigned for use by the reload.
    unsigned AssignedPhysReg;

    // VirtReg - The virtual register itself.
    unsigned VirtReg;

    ReusedOp(unsigned o, unsigned ss, unsigned prr, unsigned apr,
             unsigned vreg)
      : Operand(o), StackSlotOrReMat(ss), PhysRegReused(prr),
        AssignedPhysReg(apr), VirtReg(vreg) {}
  };

  /// ReuseInfo - This maintains a collection of ReusedOps for each operand
  /// that is reused instead of reloaded.
  class VISIBILITY_HIDDEN ReuseInfo {
    MachineInstr &MI;
    std::vector<ReusedOp> Reuses;
    BitVector PhysRegsClobbered;
  public:
    ReuseInfo(MachineInstr &mi, const MRegisterInfo *mri) : MI(mi) {
      PhysRegsClobbered.resize(mri->getNumRegs());
    }

    bool hasReuses() const {
      return !Reuses.empty();
    }

    /// addReuse - If we choose to reuse a virtual register that is already
    /// available instead of reloading it, remember that we did so.
    void addReuse(unsigned OpNo, unsigned StackSlotOrReMat,
                  unsigned PhysRegReused, unsigned AssignedPhysReg,
                  unsigned VirtReg) {
      // If the reload is to the assigned register anyway, no undo will be
      // required.
      if (PhysRegReused == AssignedPhysReg) return;

      // Otherwise, remember this.
      Reuses.push_back(ReusedOp(OpNo, StackSlotOrReMat, PhysRegReused,
                                AssignedPhysReg, VirtReg));
    }

    void markClobbered(unsigned PhysReg) {
      PhysRegsClobbered.set(PhysReg);
    }

    bool isClobbered(unsigned PhysReg) const {
      return PhysRegsClobbered.test(PhysReg);
    }

    /// GetRegForReload - We are about to emit a reload into PhysReg.  If there
    /// is some other operand that is using the specified register, either pick
    /// a new register to use, or evict the previous reload and use this reg.
    unsigned GetRegForReload(unsigned PhysReg, MachineInstr *MI,
                             AvailableSpills &Spills,
                             std::vector<MachineInstr*> &MaybeDeadStores,
                             SmallSet<unsigned, 8> &Rejected,
                             BitVector &RegKills,
                             std::vector<MachineOperand*> &KillOps,
                             VirtRegMap &VRM) {
      if (Reuses.empty()) return PhysReg;  // This is most often empty.

      for (unsigned ro = 0, e = Reuses.size(); ro != e; ++ro) {
        ReusedOp &Op = Reuses[ro];
        // If we find some other reuse that was supposed to use this register
        // exactly for its reload, we can change this reload to use ITS reload
        // register. That is, unless its reload register has already been
        // considered and subsequently rejected because it has also been reused
        // by another operand.
        if (Op.PhysRegReused == PhysReg &&
            Rejected.count(Op.AssignedPhysReg) == 0) {
          // Yup, use the reload register that we didn't use before.
          unsigned NewReg = Op.AssignedPhysReg;
          Rejected.insert(PhysReg);
          return GetRegForReload(NewReg, MI, Spills, MaybeDeadStores, Rejected,
                                 RegKills, KillOps, VRM);
        } else {
          // Otherwise, we might also have a problem if a previously reused
          // value aliases the new register.  If so, codegen the previous reload
          // and use this one.
          unsigned PRRU = Op.PhysRegReused;
          const MRegisterInfo *MRI = Spills.getRegInfo();
          if (MRI->areAliases(PRRU, PhysReg)) {
            // Okay, we found out that an alias of a reused register
            // was used.  This isn't good because it means we have
            // to undo a previous reuse.
            MachineBasicBlock *MBB = MI->getParent();
            const TargetRegisterClass *AliasRC =
              MBB->getParent()->getSSARegMap()->getRegClass(Op.VirtReg);

            // Copy Op out of the vector and remove it; we're going to insert
            // an explicit load for it.
            ReusedOp NewOp = Op;
            Reuses.erase(Reuses.begin()+ro);

            // Ok, we're going to try to emit the reload we were supposed to
            // do in the first place, loading the assigned physreg from its
            // slot.  However, that register could hold a reuse.  Check to see
            // if it conflicts or would prefer us to use a different register.
            unsigned NewPhysReg = GetRegForReload(NewOp.AssignedPhysReg,
                                                  MI, Spills, MaybeDeadStores,
                                              Rejected, RegKills, KillOps, VRM);

            if (NewOp.StackSlotOrReMat > VirtRegMap::MAX_STACK_SLOT) {
              MRI->reMaterialize(*MBB, MI, NewPhysReg,
                                 VRM.getReMaterializedMI(NewOp.VirtReg));
              ++NumReMats;
            } else {
              MRI->loadRegFromStackSlot(*MBB, MI, NewPhysReg,
                                        NewOp.StackSlotOrReMat, AliasRC);
              // Any stores to this stack slot are not dead anymore.
              MaybeDeadStores[NewOp.StackSlotOrReMat] = NULL;
              ++NumLoads;
            }
            Spills.ClobberPhysReg(NewPhysReg);
            Spills.ClobberPhysReg(NewOp.PhysRegReused);

            MI->getOperand(NewOp.Operand).setReg(NewPhysReg);

            Spills.addAvailable(NewOp.StackSlotOrReMat, MI, NewPhysReg);
            MachineBasicBlock::iterator MII = MI;
            --MII;
            UpdateKills(*MII, RegKills, KillOps);
            DOUT << '\t' << *MII;

            DOUT << "Reuse undone!\n";
            --NumReused;

            // Finally, PhysReg is now available, go ahead and use it.
            return PhysReg;
          }
        }
      }
      return PhysReg;
    }

    /// GetRegForReload - Helper for the above GetRegForReload(). Add a
    /// 'Rejected' set to remember which registers have been considered and
    /// rejected for the reload. This avoids infinite looping in cases like
    /// this:
    /// t1 := op t2, t3
    /// t2 <- assigned r0 for use by the reload but ended up reusing r1
    /// t3 <- assigned r1 for use by the reload but ended up reusing r0
    /// t1 <- desires r1
    ///       sees r1 is taken by t2, tries t2's reload register r0
    ///       sees r0 is taken by t3, tries t3's reload register r1
    ///       sees r1 is taken by t2, tries t2's reload register r0 ...
    unsigned GetRegForReload(unsigned PhysReg, MachineInstr *MI,
                             AvailableSpills &Spills,
                             std::vector<MachineInstr*> &MaybeDeadStores,
                             BitVector &RegKills,
                             std::vector<MachineOperand*> &KillOps,
                             VirtRegMap &VRM) {
      SmallSet<unsigned, 8> Rejected;
      return GetRegForReload(PhysReg, MI, Spills, MaybeDeadStores, Rejected,
                             RegKills, KillOps, VRM);
    }
  };
}


/// RewriteMBB - Keep track of which spills are available even after the
/// register allocator is done with them.  If possible, avoid reloading vregs.
void LocalSpiller::RewriteMBB(MachineBasicBlock &MBB, VirtRegMap &VRM) {
  DOUT << MBB.getBasicBlock()->getName() << ":\n";

  MachineFunction &MF = *MBB.getParent();

  // Spills - Keep track of which spilled values are available in physregs so
  // that we can choose to reuse the physregs instead of emitting reloads.
  AvailableSpills Spills(MRI, TII);

  // MaybeDeadStores - When we need to write a value back into a stack slot,
  // keep track of the inserted store.  If the stack slot value is never read
  // (because the value was used from some available register, for example), and
  // subsequently stored to, the original store is dead.  This map keeps track
  // of inserted stores that are not used.  If we see a subsequent store to the
  // same stack slot, the original store is deleted.
  std::vector<MachineInstr*> MaybeDeadStores;
  MaybeDeadStores.resize(MF.getFrameInfo()->getObjectIndexEnd(), NULL);

  // ReMatDefs - These are rematerializable def MIs which are not deleted.
  SmallSet<MachineInstr*, 4> ReMatDefs;

  // Keep track of kill information.
  BitVector RegKills(MRI->getNumRegs());
  std::vector<MachineOperand*> KillOps;
  KillOps.resize(MRI->getNumRegs(), NULL);

  for (MachineBasicBlock::iterator MII = MBB.begin(), E = MBB.end();
       MII != E; ) {
    MachineInstr &MI = *MII;
    MachineBasicBlock::iterator NextMII = MII; ++NextMII;
    VirtRegMap::MI2VirtMapTy::const_iterator I, End;

    bool Erased = false;
    bool BackTracked = false;

    /// ReusedOperands - Keep track of operand reuse in case we need to undo
    /// reuse.
    ReuseInfo ReusedOperands(MI, MRI);

    // Loop over all of the implicit defs, clearing them from our available
    // sets.
    const TargetInstrDescriptor *TID = MI.getInstrDescriptor();
    if (TID->ImplicitDefs) {
      const unsigned *ImpDef = TID->ImplicitDefs;
      for ( ; *ImpDef; ++ImpDef) {
        MF.setPhysRegUsed(*ImpDef);
        ReusedOperands.markClobbered(*ImpDef);
        Spills.ClobberPhysReg(*ImpDef);
      }
    }

    // Process all of the spilled uses and all non-spilled reg references.
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (!MO.isRegister() || MO.getReg() == 0)
        continue;   // Ignore non-register operands.

      if (MRegisterInfo::isPhysicalRegister(MO.getReg())) {
        // Ignore physregs for spilling, but remember that it is used by this
        // function.
        MF.setPhysRegUsed(MO.getReg());
        ReusedOperands.markClobbered(MO.getReg());
        continue;
      }

      assert(MRegisterInfo::isVirtualRegister(MO.getReg()) &&
             "Not a virtual or a physical register?");

      unsigned VirtReg = MO.getReg();
      if (VRM.isAssignedReg(VirtReg)) {
        // This virtual register was assigned a physreg!
        unsigned Phys = VRM.getPhys(VirtReg);
        MF.setPhysRegUsed(Phys);
        if (MO.isDef())
          ReusedOperands.markClobbered(Phys);
        MI.getOperand(i).setReg(Phys);
        continue;
      }

      // This virtual register is now known to be a spilled value.
      if (!MO.isUse())
        continue;  // Handle defs in the loop below (handle use&def here though)

      bool DoReMat = VRM.isReMaterialized(VirtReg);
      int SSorRMId = DoReMat
        ? VRM.getReMatId(VirtReg) : VRM.getStackSlot(VirtReg);
      int ReuseSlot = SSorRMId;

      // Check to see if this stack slot is available.
      unsigned PhysReg = Spills.getSpillSlotOrReMatPhysReg(SSorRMId);
      if (!PhysReg && DoReMat) {
        // This use is rematerializable.  But perhaps the value is still
        // available in a stack slot if the definition was not deleted.  If so,
        // check if we can reuse that value.
        ReuseSlot = VRM.getStackSlot(VirtReg);
        if (ReuseSlot != VirtRegMap::NO_STACK_SLOT)
          PhysReg = Spills.getSpillSlotOrReMatPhysReg(ReuseSlot);
      }
      if (PhysReg) {
        // This spilled operand might be part of a two-address operand.  If this
        // is the case, then changing it will necessarily require changing the
        // def part of the instruction as well.  However, in some cases, we
        // aren't allowed to modify the reused register.  If none of these cases
        // apply, reuse it.
        bool CanReuse = true;
        int ti = TID->getOperandConstraint(i, TOI::TIED_TO);
        if (ti != -1 &&
            MI.getOperand(ti).isReg() &&
            MI.getOperand(ti).getReg() == VirtReg) {
          // Okay, we have a two address operand.  We can reuse this physreg as
          // long as we are allowed to clobber the value and there isn't an
          // earlier def that has already clobbered the physreg.
          CanReuse = Spills.canClobberPhysReg(ReuseSlot) &&
            !ReusedOperands.isClobbered(PhysReg);
        }
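        // Illustrative case (registers made up): for "R1 = add R1, R2" where
        // the tied use is available in R3 but R3's value must be preserved
        // (its CanClobber bit is clear), reusing R3 would let the add
        // overwrite it, so CanReuse is false and we fall through to the
        // copy/reload path below.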

        if (CanReuse) {
          // If this stack slot value is already available, reuse it!
          if (ReuseSlot > VirtRegMap::MAX_STACK_SLOT)
            DOUT << "Reusing RM#" << ReuseSlot-VirtRegMap::MAX_STACK_SLOT-1;
          else
            DOUT << "Reusing SS#" << ReuseSlot;
          DOUT << " from physreg "
               << MRI->getName(PhysReg) << " for vreg"
               << VirtReg <<" instead of reloading into physreg "
               << MRI->getName(VRM.getPhys(VirtReg)) << "\n";
          MI.getOperand(i).setReg(PhysReg);

          // One technical detail remains: we don't know that PhysReg won't be
          // clobbered by a reloaded stack slot that occurs later in the
          // instruction.  In particular, consider 'op V1, V2'.
          // If V1 is available in physreg R0, we would choose to reuse it
          // here, instead of reloading it into the register the allocator
          // indicated (say R1).  However, V2 might have to be reloaded
          // later, and it might indicate that it needs to live in R0.  When
          // this occurs, we need to have information available that
          // indicates it is safe to use R1 for the reload instead of R0.
          //
          // To further complicate matters, we might conflict with an alias,
          // or R0 and R1 might not be compatible with each other.  In this
          // case, we actually insert a reload for V1 in R1, ensuring that
          // we can get at R0 or its alias.
          ReusedOperands.addReuse(i, ReuseSlot, PhysReg,
                                  VRM.getPhys(VirtReg), VirtReg);
          if (ti != -1)
            // Only mark it clobbered if this is a use&def operand.
            ReusedOperands.markClobbered(PhysReg);
          ++NumReused;

          if (MI.getOperand(i).isKill() &&
              ReuseSlot <= VirtRegMap::MAX_STACK_SLOT) {
            // This was the last use and the spilled value is still available
            // for reuse. That means the spill was unnecessary!
            MachineInstr* DeadStore = MaybeDeadStores[ReuseSlot];
            if (DeadStore) {
              DOUT << "Removed dead store:\t" << *DeadStore;
              InvalidateKills(*DeadStore, RegKills, KillOps);
              MBB.erase(DeadStore);
              VRM.RemoveFromFoldedVirtMap(DeadStore);
              MaybeDeadStores[ReuseSlot] = NULL;
              ++NumDSE;
            }
          }
          continue;
        }

        // Otherwise we have a situation where we have a two-address instruction
        // whose mod/ref operand needs to be reloaded.  This reload is already
        // available in some register "PhysReg", but if we used PhysReg as the
        // operand to our 2-addr instruction, the instruction would modify
        // PhysReg.  This isn't cool if something later uses PhysReg and expects
        // to get its initial value.
        //
        // To avoid this problem, and to avoid doing a load right after a store,
        // we emit a copy from PhysReg into the designated register for this
        // operand.
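        // For instance (registers made up): if the tied use of "R1 = add R1, ..."
        // is available in R0 but R0's value must be preserved, we emit
        // "R1 = copy R0" and rewrite the operand to use R1.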
        unsigned DesignatedReg = VRM.getPhys(VirtReg);
        assert(DesignatedReg && "Must map virtreg to physreg!");

        // Note that, if we reused a register for a previous operand, the
        // register we want to reload into might not actually be
        // available.  If this occurs, use the register indicated by the
        // reuser.
        if (ReusedOperands.hasReuses())
          DesignatedReg = ReusedOperands.GetRegForReload(DesignatedReg, &MI,
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);

        // If the mapped designated register is actually the physreg we have
        // incoming, we don't need to insert a dead copy.
        if (DesignatedReg == PhysReg) {
          // If this stack slot value is already available, reuse it!
          if (ReuseSlot > VirtRegMap::MAX_STACK_SLOT)
            DOUT << "Reusing RM#" << ReuseSlot-VirtRegMap::MAX_STACK_SLOT-1;
          else
            DOUT << "Reusing SS#" << ReuseSlot;
          DOUT << " from physreg " << MRI->getName(PhysReg) << " for vreg"
               << VirtReg
               << " instead of reloading into same physreg.\n";
          MI.getOperand(i).setReg(PhysReg);
          ReusedOperands.markClobbered(PhysReg);
          ++NumReused;
          continue;
        }

        const TargetRegisterClass* RC = MF.getSSARegMap()->getRegClass(VirtReg);
        MF.setPhysRegUsed(DesignatedReg);
        ReusedOperands.markClobbered(DesignatedReg);
        MRI->copyRegToReg(MBB, &MI, DesignatedReg, PhysReg, RC);

        MachineInstr *CopyMI = prior(MII);
        UpdateKills(*CopyMI, RegKills, KillOps);

        // This invalidates DesignatedReg.
        Spills.ClobberPhysReg(DesignatedReg);

        Spills.addAvailable(ReuseSlot, &MI, DesignatedReg);
        MI.getOperand(i).setReg(DesignatedReg);
        DOUT << '\t' << *prior(MII);
        ++NumReused;
        continue;
      }

      // Otherwise, reload it and remember that we have it.
      PhysReg = VRM.getPhys(VirtReg);
      assert(PhysReg && "Must map virtreg to physreg!");
      const TargetRegisterClass* RC = MF.getSSARegMap()->getRegClass(VirtReg);

      // Note that, if we reused a register for a previous operand, the
      // register we want to reload into might not actually be
      // available.  If this occurs, use the register indicated by the
      // reuser.
      if (ReusedOperands.hasReuses())
        PhysReg = ReusedOperands.GetRegForReload(PhysReg, &MI,
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);

      MF.setPhysRegUsed(PhysReg);
      ReusedOperands.markClobbered(PhysReg);
      if (DoReMat) {
        MRI->reMaterialize(MBB, &MI, PhysReg, VRM.getReMaterializedMI(VirtReg));
        ++NumReMats;
      } else {
        MRI->loadRegFromStackSlot(MBB, &MI, PhysReg, SSorRMId, RC);
        ++NumLoads;
      }
      // This invalidates PhysReg.
      Spills.ClobberPhysReg(PhysReg);

      // Any stores to this stack slot are not dead anymore.
      if (!DoReMat)
        MaybeDeadStores[SSorRMId] = NULL;
      Spills.addAvailable(SSorRMId, &MI, PhysReg);
      // Assumes this is the last use. IsKill will be unset if reg is reused
      // unless it's a two-address operand.
      if (TID->getOperandConstraint(i, TOI::TIED_TO) == -1)
        MI.getOperand(i).setIsKill();
      MI.getOperand(i).setReg(PhysReg);
      UpdateKills(*prior(MII), RegKills, KillOps);
      DOUT << '\t' << *prior(MII);
    }

    DOUT << '\t' << MI;

    // If we have folded references to memory operands, make sure we clear all
    // physical registers that may contain the value of the spilled virtual
    // register.
    SmallSet<int, 1> FoldedSS;
    for (tie(I, End) = VRM.getFoldedVirts(&MI); I != End; ++I) {
      DOUT << "Folded vreg: " << I->second.first << "  MR: "
           << I->second.second;
      unsigned VirtReg = I->second.first;
      VirtRegMap::ModRef MR = I->second.second;
      if (VRM.isAssignedReg(VirtReg)) {
        DOUT << ": No stack slot!\n";
        continue;
      }
      int SS = VRM.getStackSlot(VirtReg);
      FoldedSS.insert(SS);
      DOUT << " - StackSlot: " << SS << "\n";

      // If this folded instruction is just a use, check to see if it's a
      // straight load from the virt reg slot.
      if ((MR & VirtRegMap::isRef) && !(MR & VirtRegMap::isMod)) {
        int FrameIdx;
        if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
          if (FrameIdx == SS) {
            // If this spill slot is available, turn it into a copy (or nothing)
            // instead of leaving it as a load!
            if (unsigned InReg = Spills.getSpillSlotOrReMatPhysReg(SS)) {
              DOUT << "Promoted Load To Copy: " << MI;
              if (DestReg != InReg) {
                MRI->copyRegToReg(MBB, &MI, DestReg, InReg,
                                  MF.getSSARegMap()->getRegClass(VirtReg));
                // Revisit the copy so we make sure to notice the effects of the
                // operation on the destreg (either needing to RA it if it's
                // virtual or needing to clobber any values if it's physical).
                NextMII = &MI;
                --NextMII;  // backtrack to the copy.
                BackTracked = true;
              } else
                DOUT << "Removing now-noop copy: " << MI;

              VRM.RemoveFromFoldedVirtMap(&MI);
              MBB.erase(&MI);
              Erased = true;
              goto ProcessNextInst;
            }
          }
        }
      }
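      // E.g. (names illustrative): a folded reload "R0 = load SS#4" whose
      // slot is already available in R2 becomes "R0 = copy R2", and if the
      // load already targeted R2 it is simply erased.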

      // If this reference is not a use, any previous store is now dead.
      // Otherwise, the store to this stack slot is not dead anymore.
      MachineInstr* DeadStore = MaybeDeadStores[SS];
      if (DeadStore) {
        if (!(MR & VirtRegMap::isRef)) {  // Previous store is dead.
          // If we get here, the store is dead, nuke it now.
          assert((MR & VirtRegMap::isMod) && "Can't be modref!");
1039          DOUT << "Removed dead store:\t" << *DeadStore;
1040          InvalidateKills(*DeadStore, RegKills, KillOps);
1041          MBB.erase(DeadStore);
1042          VRM.RemoveFromFoldedVirtMap(DeadStore);
1043          ++NumDSE;
1044        }
1045        MaybeDeadStores[SS] = NULL;
1046      }
1047
1048      // If the spill slot value is available, and this is a new definition of
1049      // the value, the value is not available anymore.
1050      if (MR & VirtRegMap::isMod) {
1051        // Notice that the value in this stack slot has been modified.
1052        Spills.ModifyStackSlotOrReMat(SS);
1053
1054        // If this is *just* a mod of the value, check to see if this is just a
1055        // store to the spill slot (i.e. the spill got merged into the copy). If
1056        // so, realize that the vreg is available now, and add the store to the
1057        // MaybeDeadStore info.
1058        int StackSlot;
1059        if (!(MR & VirtRegMap::isRef)) {
1060          if (unsigned SrcReg = TII->isStoreToStackSlot(&MI, StackSlot)) {
1061            assert(MRegisterInfo::isPhysicalRegister(SrcReg) &&
1062                   "Src hasn't been allocated yet?");
1063            // Okay, this is certainly a store of SrcReg to [StackSlot].  Mark
1064            // this as a potentially dead store in case there is a subsequent
1065            // store into the stack slot without a read from it.
1066            MaybeDeadStores[StackSlot] = &MI;
1067
            // If the stack slot value was previously available in some other
            // register, change it now.  Otherwise, make the value available
            // in SrcReg.
            Spills.addAvailable(StackSlot, &MI, SrcReg, false/*don't clobber*/);
          }
        }
      }
    }
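
    // Dead store elimination example (names illustrative): "store R0 -> SS#2"
    // is recorded in MaybeDeadStores; if SS#2 is stored to again before any
    // read of the slot, the first store is deleted (counted by NumDSE).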

    // Process all of the spilled defs.
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (MO.isRegister() && MO.getReg() && MO.isDef()) {
        unsigned VirtReg = MO.getReg();

        if (!MRegisterInfo::isVirtualRegister(VirtReg)) {
          // Check to see if this is a noop copy.  If so, eliminate the
          // instruction before considering the dest reg to be changed.
          unsigned Src, Dst;
          if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
            ++NumDCE;
            DOUT << "Removing now-noop copy: " << MI;
            MBB.erase(&MI);
            Erased = true;
            VRM.RemoveFromFoldedVirtMap(&MI);
            Spills.disallowClobberPhysReg(VirtReg);
            goto ProcessNextInst;
          }

          // If it's not a no-op copy, it clobbers the value in the destreg.
          Spills.ClobberPhysReg(VirtReg);
          ReusedOperands.markClobbered(VirtReg);

          // Check to see if this instruction is a load from a stack slot into
          // a register.  If so, this provides the stack slot value in the reg.
          int FrameIdx;
          if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
            assert(DestReg == VirtReg && "Unknown load situation!");

            // If it is a folded reference, then it's not safe to clobber.
            bool Folded = FoldedSS.count(FrameIdx);
            // Otherwise, if it wasn't available, remember that it is now!
            Spills.addAvailable(FrameIdx, &MI, DestReg, !Folded);
            goto ProcessNextInst;
          }

          continue;
        }

        bool DoReMat = VRM.isReMaterialized(VirtReg);
        if (DoReMat)
          ReMatDefs.insert(&MI);

        // The only vregs left are stack slot definitions.
        int StackSlot = VRM.getStackSlot(VirtReg);
        const TargetRegisterClass *RC = MF.getSSARegMap()->getRegClass(VirtReg);

        // If this def is part of a two-address operand, make sure to execute
        // the store from the correct physical register.
        unsigned PhysReg;
        int TiedOp = MI.getInstrDescriptor()->findTiedToSrcOperand(i);
        if (TiedOp != -1)
          PhysReg = MI.getOperand(TiedOp).getReg();
        else {
          PhysReg = VRM.getPhys(VirtReg);
          if (ReusedOperands.isClobbered(PhysReg)) {
            // Another def has taken the assigned physreg. It must have been a
            // use&def which got it due to reuse. Undo the reuse!
            PhysReg = ReusedOperands.GetRegForReload(PhysReg, &MI,
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);
          }
        }

        MF.setPhysRegUsed(PhysReg);
        ReusedOperands.markClobbered(PhysReg);
        MI.getOperand(i).setReg(PhysReg);
        if (!MO.isDead()) {
          MRI->storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
          DOUT << "Store:\t" << *next(MII);

          // If there is a dead store to this stack slot, nuke it now.
          MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
          if (LastStore) {
            DOUT << "Removed dead store:\t" << *LastStore;
            ++NumDSE;
            SmallVector<unsigned, 1> KillRegs;
            InvalidateKills(*LastStore, RegKills, KillOps, &KillRegs);
            MachineBasicBlock::iterator PrevMII = LastStore;
            bool CheckDef = PrevMII != MBB.begin();
            if (CheckDef)
              --PrevMII;
            MBB.erase(LastStore);
            VRM.RemoveFromFoldedVirtMap(LastStore);
            if (CheckDef) {
              // Look at defs of killed registers on the store. Mark the defs
              // as dead since the store has been deleted and they aren't
              // being reused.
              for (unsigned j = 0, ee = KillRegs.size(); j != ee; ++j) {
                bool HasOtherDef = false;
                if (InvalidateRegDef(PrevMII, MI, KillRegs[j], HasOtherDef)) {
                  MachineInstr *DeadDef = PrevMII;
                  if (ReMatDefs.count(DeadDef) && !HasOtherDef) {
                    // FIXME: This assumes a remat def does not have side
                    // effects.
                    MBB.erase(DeadDef);
                    VRM.RemoveFromFoldedVirtMap(DeadDef);
                    ++NumDRM;
                  }
                }
              }
            }
          }
          LastStore = next(MII);

          // If the stack slot value was previously available in some other
          // register, change it now.  Otherwise, make the value available
          // in PhysReg.
          Spills.ModifyStackSlotOrReMat(StackSlot);
          Spills.ClobberPhysReg(PhysReg);
          Spills.addAvailable(StackSlot, LastStore, PhysReg);
          ++NumStores;

          // Check to see if this is a noop copy.  If so, eliminate the
          // instruction before considering the dest reg to be changed.
          {
            unsigned Src, Dst;
            if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
              ++NumDCE;
              DOUT << "Removing now-noop copy: " << MI;
              MBB.erase(&MI);
              Erased = true;
              VRM.RemoveFromFoldedVirtMap(&MI);
              UpdateKills(*LastStore, RegKills, KillOps);
              goto ProcessNextInst;
            }
          }
        }
      }
    }
  ProcessNextInst:
    if (!Erased && !BackTracked)
      for (MachineBasicBlock::iterator II = MI; II != NextMII; ++II)
        UpdateKills(*II, RegKills, KillOps);
    MII = NextMII;
  }
}


llvm::Spiller* llvm::createSpiller() {
  switch (SpillerOpt) {
  default: assert(0 && "Unreachable!");
  case local:
    return new LocalSpiller();
  case simple:
    return new SimpleSpiller();
  }
}