RegAllocFast.cpp revision daa8e0efc9b646d89ca9deb9acd3b8f2eb349a1d
//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

// Optionally run the machine code verifier on the input before allocating, to
// catch malformed input early.
static cl::opt<bool> VerifyFastRegalloc("verify-fast-regalloc", cl::Hidden,
    cl::desc("Verify machine code before fast regalloc"));

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator so it can be selected with -regalloc=fast.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(&ID), StackSlotForVirtReg(-1),
               atEndOfBlock(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.  -1 (the IndexedMap default) means no slot has been
    // allocated yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      // The p=0 default only exists to satisfy DenseMap's value requirements;
      // actually constructing a LiveReg without a physreg asserts.
      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {
        assert(p && "Don't create LiveRegs without a PhysReg");
      }
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // atEndOfBlock - This flag is set after allocating all instructions in a
    // block, before emitting final spills. When it is set, LiveRegMap is no
    // longer updated properly since it will be cleared anyway.
    bool atEndOfBlock;

  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      // This allocator cannot handle PHIs or two-address instructions, so
      // both lowering passes must run first.
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock(MachineBasicBlock &MBB);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(LiveRegMap::iterator i);
    void killVirtReg(LiveRegMap::iterator i);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                      LiveRegMap::iterator i, bool isKill);
    void spillVirtReg(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                      unsigned VirtReg, bool isKill);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineBasicBlock &MBB, MachineInstr *MI,
                       unsigned PhysReg, RegState NewState);
    LiveRegMap::iterator assignVirtToPhysReg(unsigned VirtReg,
                                             unsigned PhysReg);
    LiveRegMap::iterator allocVirtReg(MachineBasicBlock &MBB, MachineInstr *MI,
                                      unsigned VirtReg, unsigned Hint);
    unsigned defineVirtReg(MachineBasicBlock &MBB, MachineInstr *MI,
                           unsigned OpNum, unsigned VirtReg, unsigned Hint);
    unsigned reloadVirtReg(MachineBasicBlock &MBB, MachineInstr *MI,
                           unsigned OpNum, unsigned VirtReg, unsigned Hint);
    void spillAll(MachineBasicBlock &MBB, MachineInstr *MI);
    void setPhysReg(MachineOperand &MO, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *i = &MO;
  while ((i = i->getNextOperandForReg()))
    if (!i->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
200void RAFast::addKillFlag(LiveRegMap::iterator lri) { 201 assert(lri != LiveVirtRegs.end() && "Killing unmapped virtual register"); 202 const LiveReg &LR = lri->second; 203 if (LR.LastUse) { 204 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 205 if (MO.isDef()) 206 MO.setIsDead(); 207 else if (!LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) 208 MO.setIsKill(); 209 } 210} 211 212/// killVirtReg - Mark virtreg as no longer available. 213void RAFast::killVirtReg(LiveRegMap::iterator lri) { 214 addKillFlag(lri); 215 const LiveReg &LR = lri->second; 216 assert(PhysRegState[LR.PhysReg] == lri->first && "Broken RegState mapping"); 217 PhysRegState[LR.PhysReg] = regFree; 218 // Erase from LiveVirtRegs unless we're at the end of the block when 219 // everything will be bulk erased. 220 if (!atEndOfBlock) 221 LiveVirtRegs.erase(lri); 222} 223 224/// killVirtReg - Mark virtreg as no longer available. 225void RAFast::killVirtReg(unsigned VirtReg) { 226 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 227 "killVirtReg needs a virtual register"); 228 LiveRegMap::iterator lri = LiveVirtRegs.find(VirtReg); 229 if (lri != LiveVirtRegs.end()) 230 killVirtReg(lri); 231} 232 233/// spillVirtReg - This method spills the value specified by VirtReg into the 234/// corresponding stack slot if needed. If isKill is set, the register is also 235/// killed. 236void RAFast::spillVirtReg(MachineBasicBlock &MBB, 237 MachineBasicBlock::iterator MI, 238 unsigned VirtReg, bool isKill) { 239 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 240 "Spilling a physical register is illegal!"); 241 LiveRegMap::iterator lri = LiveVirtRegs.find(VirtReg); 242 assert(lri != LiveVirtRegs.end() && "Spilling unmapped virtual register"); 243 spillVirtReg(MBB, MI, lri, isKill); 244} 245 246/// spillVirtReg - Do the actual work of spilling. 
247void RAFast::spillVirtReg(MachineBasicBlock &MBB, 248 MachineBasicBlock::iterator MI, 249 LiveRegMap::iterator lri, bool isKill) { 250 LiveReg &LR = lri->second; 251 assert(PhysRegState[LR.PhysReg] == lri->first && "Broken RegState mapping"); 252 253 // If this physreg is used by the instruction, we want to kill it on the 254 // instruction, not on the spill. 255 bool spillKill = isKill && LR.LastUse != MI; 256 257 if (LR.Dirty) { 258 LR.Dirty = false; 259 DEBUG(dbgs() << "Spilling %reg" << lri->first 260 << " in " << TRI->getName(LR.PhysReg)); 261 const TargetRegisterClass *RC = MRI->getRegClass(lri->first); 262 int FrameIndex = getStackSpaceFor(lri->first, RC); 263 DEBUG(dbgs() << " to stack slot #" << FrameIndex << "\n"); 264 TII->storeRegToStackSlot(MBB, MI, LR.PhysReg, spillKill, 265 FrameIndex, RC, TRI); 266 ++NumStores; // Update statistics 267 268 if (spillKill) 269 LR.LastUse = 0; // Don't kill register again 270 else if (!isKill) { 271 MachineInstr *Spill = llvm::prior(MI); 272 LR.LastUse = Spill; 273 LR.LastOpNum = Spill->findRegisterUseOperandIdx(LR.PhysReg); 274 } 275 } 276 277 if (isKill) 278 killVirtReg(lri); 279} 280 281/// spillAll - Spill all dirty virtregs without killing them. 282void RAFast::spillAll(MachineBasicBlock &MBB, MachineInstr *MI) { 283 SmallVector<unsigned, 16> Dirty; 284 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 285 e = LiveVirtRegs.end(); i != e; ++i) 286 if (i->second.Dirty) 287 Dirty.push_back(i->first); 288 for (unsigned i = 0, e = Dirty.size(); i != e; ++i) 289 spillVirtReg(MBB, MI, Dirty[i], false); 290} 291 292/// usePhysReg - Handle the direct use of a physical register. 293/// Check that the register is not used by a virtreg. 294/// Kill the physreg, marking it free. 295/// This may add implicit kills to MO->getParent() and invalidate MO. 
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    // Register itself is disabled; an alias may hold the value. Fall out of
    // the switch and scan the alias set below.
    break;
  case regReserved:
    // A reserved register is consumed by its first use.
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineBasicBlock &MBB, MachineInstr *MI,
                           unsigned PhysReg, RegState NewState) {
  UsedInInstr.set(PhysReg);
  // Note: the switch condition declares VirtReg; the default case handles all
  // values that are virtual register numbers rather than RegState enums.
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    // Evict the virtual register currently occupying this physreg.
    spillVirtReg(MBB, MI, VirtReg, true);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MBB, MI, VirtReg, true);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once the super-register is disabled, the remaining (smaller) aliases
      // cannot conflict, so the scan can stop.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << VirtReg << " to "
               << TRI->getName(PhysReg) << "\n");
  // Record both directions of the mapping: physreg -> virtreg in
  // PhysRegState, virtreg -> LiveReg(physreg) in LiveVirtRegs.
  PhysRegState[PhysReg] = VirtReg;
  return LiveVirtRegs.insert(std::make_pair(VirtReg, PhysReg)).first;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineBasicBlock &MBB,
                                                  MachineInstr *MI,
                                                  unsigned VirtReg,
                                                  unsigned Hint) {
  // Heuristic cost of evicting a live virtreg, compared against a cost of 1
  // per free alias when extending the working set below.
  const unsigned spillCost = 100;
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || UsedInInstr.test(Hint) ||
               !Allocatable.test(Hint)))
    Hint = 0;

  // If there is no hint, peek at the first use of this register.
  if (!Hint && !MRI->use_nodbg_empty(VirtReg)) {
    // NOTE: this MI intentionally shadows the MI parameter for the duration
    // of this block; only the use instruction is inspected here.
    MachineInstr &MI = *MRI->use_nodbg_begin(VirtReg);
    unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
    // Copy to physreg -> use physreg as hint.
    if (TII->isMoveInstr(MI, SrcReg, DstReg, SrcSubReg, DstSubReg) &&
        SrcReg == VirtReg && TargetRegisterInfo::isPhysicalRegister(DstReg) &&
        RC->contains(DstReg) && !UsedInInstr.test(DstReg) &&
        Allocatable.test(DstReg)) {
      Hint = DstReg;
      DEBUG(dbgs() << "%reg" << VirtReg << " gets hint from " << MI);
    }
  }

  // Take hint when possible.
  if (Hint) {
    assert(RC->contains(Hint) && !UsedInInstr.test(Hint) &&
           Allocatable.test(Hint) && "Invalid hint should have been cleared");
    switch(PhysRegState[Hint]) {
    case regDisabled:
    case regReserved:
      // Hint not directly usable; fall back to the normal scan below.
      break;
    default:
      // Hint holds another virtreg - evict it, then take the register.
      spillVirtReg(MBB, MI, PhysRegState[Hint], true);
      // Fall through.
    case regFree:
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  // First try to find a completely free register.
  unsigned BestCost = 0, BestReg = 0;
  bool hasDisabled = false;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    switch(PhysRegState[PhysReg]) {
    case regDisabled:
      hasDisabled = true;
      // Fall through - disabled regs are skipped here, reconsidered below.
    case regReserved:
      continue;
    case regFree:
      if (!UsedInInstr.test(PhysReg))
        return assignVirtToPhysReg(VirtReg, PhysReg);
      continue;
    default:
      // Grab the first spillable register we meet.
      if (!BestReg && !UsedInInstr.test(PhysReg))
        BestReg = PhysReg, BestCost = spillCost;
      continue;
    }
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << " candidate=" << TRI->getName(BestReg) << "\n");

  // Try to extend the working set for RC if there were any disabled registers.
  if (hasDisabled && (!BestReg || BestCost >= spillCost)) {
    for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
      unsigned PhysReg = *I;
      if (PhysRegState[PhysReg] != regDisabled || UsedInInstr.test(PhysReg))
        continue;

      // Calculate the cost of bringing PhysReg into the working set.
      unsigned Cost=0;
      bool Impossible = false;
      for (const unsigned *AS = TRI->getAliasSet(PhysReg);
           unsigned Alias = *AS; ++AS) {
        if (UsedInInstr.test(Alias)) {
          Impossible = true;
          break;
        }
        switch (PhysRegState[Alias]) {
        case regDisabled:
          break;
        case regReserved:
          Impossible = true;
          break;
        case regFree:
          // Disabling a free alias costs a token amount.
          Cost++;
          break;
        default:
          // Alias holds a virtreg that would have to be spilled.
          Cost += spillCost;
          break;
        }
      }
      if (Impossible) continue;
      DEBUG(dbgs() << "- candidate " << TRI->getName(PhysReg)
                   << " cost=" << Cost << "\n");
      if (!BestReg || Cost < BestCost) {
        BestReg = PhysReg;
        BestCost = Cost;
        // A candidate cheaper than a spill is good enough - stop scanning.
        if (Cost < spillCost) break;
      }
    }
  }

  if (BestReg) {
    // BestCost is 0 when all aliases are already disabled.
    if (BestCost) {
      if (PhysRegState[BestReg] != regDisabled)
        spillVirtReg(MBB, MI, PhysRegState[BestReg], true);
      else {
        // Make sure all aliases are disabled.
        for (const unsigned *AS = TRI->getAliasSet(BestReg);
             unsigned Alias = *AS; ++AS) {
          switch (PhysRegState[Alias]) {
          case regDisabled:
            continue;
          case regFree:
            PhysRegState[Alias] = regDisabled;
            break;
          default:
            spillVirtReg(MBB, MI, PhysRegState[Alias], true);
            PhysRegState[Alias] = regDisabled;
            break;
          }
        }
      }
    }
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
  // Not reached; report_fatal_error does not return.
  return LiveVirtRegs.end();
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
unsigned RAFast::defineVirtReg(MachineBasicBlock &MBB, MachineInstr *MI,
                               unsigned OpNum, unsigned VirtReg,
                               unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator lri = LiveVirtRegs.find(VirtReg);
  if (lri == LiveVirtRegs.end())
    lri = allocVirtReg(MBB, MI, VirtReg, Hint);
  else
    addKillFlag(lri); // Kill before redefine.
  LiveReg &LR = lri->second;
  // The def becomes the last use, and the register now needs a spill before
  // it can be reused.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LR.PhysReg;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
577unsigned RAFast::reloadVirtReg(MachineBasicBlock &MBB, MachineInstr *MI, 578 unsigned OpNum, unsigned VirtReg, unsigned Hint) { 579 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 580 "Not a virtual register"); 581 LiveRegMap::iterator lri = LiveVirtRegs.find(VirtReg); 582 if (lri == LiveVirtRegs.end()) { 583 lri = allocVirtReg(MBB, MI, VirtReg, Hint); 584 const TargetRegisterClass *RC = MRI->getRegClass(VirtReg); 585 int FrameIndex = getStackSpaceFor(VirtReg, RC); 586 DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into " 587 << TRI->getName(lri->second.PhysReg) << "\n"); 588 TII->loadRegFromStackSlot(MBB, MI, lri->second.PhysReg, FrameIndex, RC, 589 TRI); 590 ++NumLoads; 591 } else if (lri->second.Dirty) { 592 MachineOperand &MO = MI->getOperand(OpNum); 593 if (isLastUseOfLocalReg(MO)) { 594 DEBUG(dbgs() << "Killing last use: " << MO << "\n"); 595 MO.setIsKill(); 596 } else if (MO.isKill()) { 597 DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n"); 598 MO.setIsKill(false); 599 } 600 } 601 LiveReg &LR = lri->second; 602 LR.LastUse = MI; 603 LR.LastOpNum = OpNum; 604 UsedInInstr.set(LR.PhysReg); 605 return LR.PhysReg; 606} 607 608// setPhysReg - Change MO the refer the PhysReg, considering subregs. 609void RAFast::setPhysReg(MachineOperand &MO, unsigned PhysReg) { 610 if (unsigned Idx = MO.getSubReg()) { 611 MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, Idx) : 0); 612 MO.setSubReg(0); 613 } else 614 MO.setReg(PhysReg); 615} 616 617void RAFast::AllocateBasicBlock(MachineBasicBlock &MBB) { 618 DEBUG(dbgs() << "\nAllocating " << MBB); 619 620 atEndOfBlock = false; 621 PhysRegState.assign(TRI->getNumRegs(), regDisabled); 622 assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?"); 623 624 MachineBasicBlock::iterator MII = MBB.begin(); 625 626 // Add live-in registers as live. 
627 for (MachineBasicBlock::livein_iterator I = MBB.livein_begin(), 628 E = MBB.livein_end(); I != E; ++I) 629 definePhysReg(MBB, MII, *I, regReserved); 630 631 SmallVector<unsigned, 8> VirtKills, PhysDefs; 632 SmallVector<MachineInstr*, 32> Coalesced; 633 634 // Otherwise, sequentially allocate each instruction in the MBB. 635 while (MII != MBB.end()) { 636 MachineInstr *MI = MII++; 637 const TargetInstrDesc &TID = MI->getDesc(); 638 DEBUG({ 639 dbgs() << "\n>> " << *MI << "Regs:"; 640 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 641 if (PhysRegState[Reg] == regDisabled) continue; 642 dbgs() << " " << TRI->getName(Reg); 643 switch(PhysRegState[Reg]) { 644 case regFree: 645 break; 646 case regReserved: 647 dbgs() << "*"; 648 break; 649 default: 650 dbgs() << "=%reg" << PhysRegState[Reg]; 651 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 652 dbgs() << "*"; 653 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 654 "Bad inverse map"); 655 break; 656 } 657 } 658 dbgs() << '\n'; 659 // Check that LiveVirtRegs is the inverse. 660 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 661 e = LiveVirtRegs.end(); i != e; ++i) { 662 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 663 "Bad map key"); 664 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 665 "Bad map value"); 666 assert(PhysRegState[i->second.PhysReg] == i->first && 667 "Bad inverse map"); 668 } 669 }); 670 671 // Debug values are not allowed to change codegen in any way. 672 if (MI->isDebugValue()) { 673 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 674 MachineOperand &MO = MI->getOperand(i); 675 if (!MO.isReg()) continue; 676 unsigned Reg = MO.getReg(); 677 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 678 LiveRegMap::iterator lri = LiveVirtRegs.find(Reg); 679 if (lri != LiveVirtRegs.end()) 680 setPhysReg(MO, lri->second.PhysReg); 681 else 682 MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry! 
683 } 684 // Next instruction. 685 continue; 686 } 687 688 // If this is a copy, we may be able to coalesce. 689 unsigned CopySrc, CopyDst, CopySrcSub, CopyDstSub; 690 if (!TII->isMoveInstr(*MI, CopySrc, CopyDst, CopySrcSub, CopyDstSub)) 691 CopySrc = CopyDst = 0; 692 693 // Track registers used by instruction. 694 UsedInInstr.reset(); 695 PhysDefs.clear(); 696 697 // First scan. 698 // Mark physreg uses and early clobbers as used. 699 // Find the end of the virtreg operands 700 unsigned VirtOpEnd = 0; 701 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 702 MachineOperand &MO = MI->getOperand(i); 703 if (!MO.isReg()) continue; 704 unsigned Reg = MO.getReg(); 705 if (!Reg) continue; 706 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 707 VirtOpEnd = i+1; 708 continue; 709 } 710 if (!Allocatable.test(Reg)) continue; 711 if (MO.isUse()) { 712 usePhysReg(MO); 713 } else if (MO.isEarlyClobber()) { 714 definePhysReg(MBB, MI, Reg, MO.isDead() ? regFree : regReserved); 715 PhysDefs.push_back(Reg); 716 } 717 } 718 719 // Second scan. 720 // Allocate virtreg uses and early clobbers. 721 // Collect VirtKills 722 for (unsigned i = 0; i != VirtOpEnd; ++i) { 723 MachineOperand &MO = MI->getOperand(i); 724 if (!MO.isReg()) continue; 725 unsigned Reg = MO.getReg(); 726 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 727 if (MO.isUse()) { 728 unsigned PhysReg = reloadVirtReg(MBB, MI, i, Reg, CopyDst); 729 CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? 
PhysReg : 0; 730 setPhysReg(MO, PhysReg); 731 if (MO.isKill()) 732 VirtKills.push_back(Reg); 733 } else if (MO.isEarlyClobber()) { 734 unsigned PhysReg = defineVirtReg(MBB, MI, i, Reg, 0); 735 setPhysReg(MO, PhysReg); 736 PhysDefs.push_back(PhysReg); 737 } 738 } 739 740 // Process virtreg kills 741 for (unsigned i = 0, e = VirtKills.size(); i != e; ++i) 742 killVirtReg(VirtKills[i]); 743 VirtKills.clear(); 744 745 MRI->addPhysRegsUsed(UsedInInstr); 746 747 // Track registers defined by instruction - early clobbers at this point. 748 UsedInInstr.reset(); 749 for (unsigned i = 0, e = PhysDefs.size(); i != e; ++i) { 750 unsigned PhysReg = PhysDefs[i]; 751 UsedInInstr.set(PhysReg); 752 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 753 unsigned Alias = *AS; ++AS) 754 UsedInInstr.set(Alias); 755 } 756 757 // Third scan. 758 // Allocate defs and collect dead defs. 759 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 760 MachineOperand &MO = MI->getOperand(i); 761 if (!MO.isReg() || !MO.isDef() || !MO.getReg()) continue; 762 unsigned Reg = MO.getReg(); 763 764 if (TargetRegisterInfo::isPhysicalRegister(Reg)) { 765 if (!Allocatable.test(Reg)) continue; 766 definePhysReg(MBB, MI, Reg, (MO.isImplicit() || MO.isDead()) ? 767 regFree : regReserved); 768 continue; 769 } 770 unsigned PhysReg = defineVirtReg(MBB, MI, i, Reg, CopySrc); 771 if (MO.isDead()) { 772 VirtKills.push_back(Reg); 773 CopyDst = 0; // cancel coalescing; 774 } else 775 CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0; 776 setPhysReg(MO, PhysReg); 777 } 778 779 // Spill all dirty virtregs before a call, in case of an exception. 780 if (TID.isCall()) { 781 DEBUG(dbgs() << " Spilling remaining registers before call.\n"); 782 spillAll(MBB, MI); 783 } 784 785 // Process virtreg deads. 
786 for (unsigned i = 0, e = VirtKills.size(); i != e; ++i) 787 killVirtReg(VirtKills[i]); 788 VirtKills.clear(); 789 790 MRI->addPhysRegsUsed(UsedInInstr); 791 792 if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) { 793 DEBUG(dbgs() << "-- coalescing: " << *MI); 794 Coalesced.push_back(MI); 795 } else { 796 DEBUG(dbgs() << "<< " << *MI); 797 } 798 } 799 800 // Spill all physical registers holding virtual registers now. 801 atEndOfBlock = true; 802 MachineBasicBlock::iterator MI = MBB.getFirstTerminator(); 803 if (MI != MBB.end() && MI->getDesc().isReturn()) { 804 // This is a return block, kill all virtual registers. 805 DEBUG(dbgs() << "Killing live registers at end of return block.\n"); 806 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end(); 807 i != e; ++i) 808 killVirtReg(i); 809 } else { 810 // This is a normal block, spill any dirty virtregs. 811 DEBUG(dbgs() << "Spilling live registers at end of block.\n"); 812 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end(); 813 i != e; ++i) 814 spillVirtReg(MBB, MI, i, true); 815 } 816 LiveVirtRegs.clear(); 817 818 // Erase all the coalesced copies. We are delaying it until now because 819 // LiveVirtsRegs might refer to the instrs. 
820 for (unsigned i = 0, e = Coalesced.size(); i != e; ++i) 821 MBB.erase(Coalesced[i]); 822 NumCopies += Coalesced.size(); 823 824 DEBUG(MBB.dump()); 825} 826 827/// runOnMachineFunction - Register allocate the whole function 828/// 829bool RAFast::runOnMachineFunction(MachineFunction &Fn) { 830 DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n" 831 << "********** Function: " 832 << ((Value*)Fn.getFunction())->getName() << '\n'); 833 if (VerifyFastRegalloc) 834 Fn.verify(this, true); 835 MF = &Fn; 836 MRI = &MF->getRegInfo(); 837 TM = &Fn.getTarget(); 838 TRI = TM->getRegisterInfo(); 839 TII = TM->getInstrInfo(); 840 841 UsedInInstr.resize(TRI->getNumRegs()); 842 Allocatable = TRI->getAllocatableSet(*MF); 843 844 // initialize the virtual->physical register map to have a 'null' 845 // mapping for all virtual registers 846 unsigned LastVirtReg = MRI->getLastVirtReg(); 847 StackSlotForVirtReg.grow(LastVirtReg); 848 849 // Loop over all of the basic blocks, eliminating virtual register references 850 for (MachineFunction::iterator MBB = Fn.begin(), MBBe = Fn.end(); 851 MBB != MBBe; ++MBB) 852 AllocateBasicBlock(*MBB); 853 854 // Make sure the set of used physregs is closed under subreg operations. 855 MRI->closePhysRegsUsed(*TRI); 856 857 StackSlotForVirtReg.clear(); 858 return true; 859} 860 861FunctionPass *llvm::createFastRegisterAllocator() { 862 return new RAFast(); 863} 864