RegAllocFast.cpp revision 548643c573d53950e28e9e810cd0454ba9a21af0
//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

// Hidden debugging aid: run the machine verifier on the function before
// allocation begins.
static cl::opt<bool> VerifyFastRegalloc("verify-fast-regalloc", cl::Hidden,
    cl::desc("Verify machine code before fast regalloc"));

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the name "fast" so it can be selected with
// the -regalloc= command line option.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  /// RAFast - A fast, block-local register allocator. It never keeps values
  /// in registers across basic block boundaries; everything live at the end
  /// of a block is spilled to a stack slot.
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    // StackSlotForVirtReg(-1) makes -1 the "no slot assigned yet" default.
    RAFast() : MachineFunctionPass(&ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    // Cached context for the function currently being allocated.
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative spill costs used by calcSpillCost/allocVirtReg when choosing
    // a physreg to evict.
    enum {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      // PHIs and two-address constraints must already be lowered before this
      // allocator runs.
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineOperand &MO, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isDef())
    MO.setIsDead();
  else if (!LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum))
    // A use tied to a def operand is not marked as a kill.
    MO.setIsKill();
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  // Flag the last use/def before forgetting about this virtreg.
  addKillFlag(LRI->second);
  const LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");
  PhysRegState[LR.PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed, and then kills the virtreg.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  // Only clean (already-spilled) values can be dropped without a store.
  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  // isBulkSpilling tells killVirtReg not to erase individual map entries;
  // the whole map is cleared at once below instead.
  isBulkSpilling = true;
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
       e = LiveVirtRegs.end(); i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    // The physreg was holding a virtreg - spill it before redefining.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling or clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
419/// 420void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) { 421 DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to " 422 << TRI->getName(PhysReg) << "\n"); 423 PhysRegState[PhysReg] = LRE.first; 424 assert(!LRE.second.PhysReg && "Already assigned a physreg"); 425 LRE.second.PhysReg = PhysReg; 426} 427 428/// allocVirtReg - Allocate a physical register for VirtReg. 429void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) { 430 const unsigned VirtReg = LRE.first; 431 432 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 433 "Can only allocate virtual registers"); 434 435 const TargetRegisterClass *RC = MRI->getRegClass(VirtReg); 436 437 // Ignore invalid hints. 438 if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) || 439 !RC->contains(Hint) || UsedInInstr.test(Hint) || 440 !Allocatable.test(Hint))) 441 Hint = 0; 442 443 // Take hint when possible. 444 if (Hint) { 445 assert(RC->contains(Hint) && !UsedInInstr.test(Hint) && 446 Allocatable.test(Hint) && "Invalid hint should have been cleared"); 447 switch(calcSpillCost(Hint)) { 448 default: 449 definePhysReg(MI, Hint, regFree); 450 // Fall through. 451 case 0: 452 return assignVirtToPhysReg(LRE, Hint); 453 case spillImpossible: 454 break; 455 } 456 } 457 458 TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF); 459 TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF); 460 461 // First try to find a completely free register. 
462 for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) { 463 unsigned PhysReg = *I; 464 if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg)) 465 return assignVirtToPhysReg(LRE, PhysReg); 466 } 467 468 DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName() 469 << "\n"); 470 471 unsigned BestReg = 0, BestCost = spillImpossible; 472 for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) { 473 unsigned Cost = calcSpillCost(*I); 474 if (Cost < BestCost) { 475 BestReg = *I; 476 BestCost = Cost; 477 if (Cost == 0) break; 478 } 479 } 480 481 if (BestReg) { 482 // BestCost is 0 when all aliases are already disabled. 483 if (BestCost) 484 definePhysReg(MI, BestReg, regFree); 485 return assignVirtToPhysReg(LRE, BestReg); 486 } 487 488 // Nothing we can do. 489 std::string msg; 490 raw_string_ostream Msg(msg); 491 Msg << "Ran out of registers during register allocation!"; 492 if (MI->isInlineAsm()) { 493 Msg << "\nPlease check your inline asm statement for " 494 << "invalid constraints:\n"; 495 MI->print(Msg, TM); 496 } 497 report_fatal_error(Msg.str()); 498} 499 500/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty. 501RAFast::LiveRegMap::iterator 502RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum, 503 unsigned VirtReg, unsigned Hint) { 504 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 505 "Not a virtual register"); 506 LiveRegMap::iterator LRI; 507 bool New; 508 tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg())); 509 LiveReg &LR = LRI->second; 510 if (New) { 511 // If there is no hint, peek at the only use of this register. 512 if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) && 513 MRI->hasOneNonDBGUse(VirtReg)) { 514 unsigned SrcReg, DstReg, SrcSubReg, DstSubReg; 515 // It's a copy, use the destination register as a hint. 
516 if (TII->isMoveInstr(*MRI->use_nodbg_begin(VirtReg), 517 SrcReg, DstReg, SrcSubReg, DstSubReg)) 518 Hint = DstReg; 519 } 520 allocVirtReg(MI, *LRI, Hint); 521 } else 522 addKillFlag(LR); // Kill before redefine. 523 assert(LR.PhysReg && "Register not assigned"); 524 LR.LastUse = MI; 525 LR.LastOpNum = OpNum; 526 LR.Dirty = true; 527 UsedInInstr.set(LR.PhysReg); 528 return LRI; 529} 530 531/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it. 532RAFast::LiveRegMap::iterator 533RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum, 534 unsigned VirtReg, unsigned Hint) { 535 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 536 "Not a virtual register"); 537 LiveRegMap::iterator LRI; 538 bool New; 539 tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg())); 540 LiveReg &LR = LRI->second; 541 MachineOperand &MO = MI->getOperand(OpNum); 542 if (New) { 543 allocVirtReg(MI, *LRI, Hint); 544 const TargetRegisterClass *RC = MRI->getRegClass(VirtReg); 545 int FrameIndex = getStackSpaceFor(VirtReg, RC); 546 DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into " 547 << TRI->getName(LR.PhysReg) << "\n"); 548 TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI); 549 ++NumLoads; 550 } else if (LR.Dirty) { 551 if (isLastUseOfLocalReg(MO)) { 552 DEBUG(dbgs() << "Killing last use: " << MO << "\n"); 553 MO.setIsKill(); 554 } else if (MO.isKill()) { 555 DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n"); 556 MO.setIsKill(false); 557 } 558 } else if (MO.isKill()) { 559 // We must remove kill flags from uses of reloaded registers because the 560 // register would be killed immediately, and there might be a second use: 561 // %foo = OR %x<kill>, %x 562 // This would cause a second reload of %x into a different register. 
563 DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n"); 564 MO.setIsKill(false); 565 } 566 assert(LR.PhysReg && "Register not assigned"); 567 LR.LastUse = MI; 568 LR.LastOpNum = OpNum; 569 UsedInInstr.set(LR.PhysReg); 570 return LRI; 571} 572 573// setPhysReg - Change MO the refer the PhysReg, considering subregs. 574// This may invalidate MO if it is necessary to add implicit kills for a 575// superregister. 576// Return tru if MO kills its register. 577bool RAFast::setPhysReg(MachineOperand &MO, unsigned PhysReg) { 578 if (!MO.getSubReg()) { 579 MO.setReg(PhysReg); 580 return MO.isKill() || MO.isDead(); 581 } 582 583 // Handle subregister index. 584 MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0); 585 MO.setSubReg(0); 586 if (MO.isUse()) { 587 if (MO.isKill()) { 588 MO.getParent()->addRegisterKilled(PhysReg, TRI, true); 589 return true; 590 } 591 return false; 592 } 593 // A subregister def implicitly defines the whole physreg. 594 if (MO.isDead()) { 595 MO.getParent()->addRegisterDead(PhysReg, TRI, true); 596 return true; 597 } 598 MO.getParent()->addRegisterDefined(PhysReg, TRI); 599 return false; 600} 601 602void RAFast::AllocateBasicBlock() { 603 DEBUG(dbgs() << "\nAllocating " << *MBB); 604 605 PhysRegState.assign(TRI->getNumRegs(), regDisabled); 606 assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?"); 607 608 MachineBasicBlock::iterator MII = MBB->begin(); 609 610 // Add live-in registers as live. 611 for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(), 612 E = MBB->livein_end(); I != E; ++I) 613 definePhysReg(MII, *I, regReserved); 614 615 SmallVector<unsigned, 8> PhysECs; 616 SmallVector<MachineInstr*, 32> Coalesced; 617 618 // Otherwise, sequentially allocate each instruction in the MBB. 
619 while (MII != MBB->end()) { 620 MachineInstr *MI = MII++; 621 const TargetInstrDesc &TID = MI->getDesc(); 622 DEBUG({ 623 dbgs() << "\n>> " << *MI << "Regs:"; 624 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 625 if (PhysRegState[Reg] == regDisabled) continue; 626 dbgs() << " " << TRI->getName(Reg); 627 switch(PhysRegState[Reg]) { 628 case regFree: 629 break; 630 case regReserved: 631 dbgs() << "*"; 632 break; 633 default: 634 dbgs() << "=%reg" << PhysRegState[Reg]; 635 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 636 dbgs() << "*"; 637 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 638 "Bad inverse map"); 639 break; 640 } 641 } 642 dbgs() << '\n'; 643 // Check that LiveVirtRegs is the inverse. 644 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 645 e = LiveVirtRegs.end(); i != e; ++i) { 646 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 647 "Bad map key"); 648 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 649 "Bad map value"); 650 assert(PhysRegState[i->second.PhysReg] == i->first && 651 "Bad inverse map"); 652 } 653 }); 654 655 // Debug values are not allowed to change codegen in any way. 656 if (MI->isDebugValue()) { 657 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 658 MachineOperand &MO = MI->getOperand(i); 659 if (!MO.isReg()) continue; 660 unsigned Reg = MO.getReg(); 661 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 662 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 663 if (LRI != LiveVirtRegs.end()) 664 setPhysReg(MO, LRI->second.PhysReg); 665 else 666 MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry! 667 } 668 // Next instruction. 669 continue; 670 } 671 672 // If this is a copy, we may be able to coalesce. 673 unsigned CopySrc, CopyDst, CopySrcSub, CopyDstSub; 674 if (!TII->isMoveInstr(*MI, CopySrc, CopyDst, CopySrcSub, CopyDstSub)) 675 CopySrc = CopyDst = 0; 676 677 // Track registers used by instruction. 
678 UsedInInstr.reset(); 679 PhysECs.clear(); 680 681 // First scan. 682 // Mark physreg uses and early clobbers as used. 683 // Find the end of the virtreg operands 684 unsigned VirtOpEnd = 0; 685 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 686 MachineOperand &MO = MI->getOperand(i); 687 if (!MO.isReg()) continue; 688 unsigned Reg = MO.getReg(); 689 if (!Reg) continue; 690 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 691 VirtOpEnd = i+1; 692 continue; 693 } 694 if (!Allocatable.test(Reg)) continue; 695 if (MO.isUse()) { 696 usePhysReg(MO); 697 } else if (MO.isEarlyClobber()) { 698 definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved); 699 PhysECs.push_back(Reg); 700 } 701 } 702 703 // Second scan. 704 // Allocate virtreg uses and early clobbers. 705 // Collect VirtKills 706 for (unsigned i = 0; i != VirtOpEnd; ++i) { 707 MachineOperand &MO = MI->getOperand(i); 708 if (!MO.isReg()) continue; 709 unsigned Reg = MO.getReg(); 710 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 711 if (MO.isUse()) { 712 LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst); 713 unsigned PhysReg = LRI->second.PhysReg; 714 CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0; 715 if (setPhysReg(MO, PhysReg)) 716 killVirtReg(LRI); 717 } else if (MO.isEarlyClobber()) { 718 LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0); 719 unsigned PhysReg = LRI->second.PhysReg; 720 setPhysReg(MO, PhysReg); 721 PhysECs.push_back(PhysReg); 722 } 723 } 724 725 MRI->addPhysRegsUsed(UsedInInstr); 726 727 // Track registers defined by instruction - early clobbers at this point. 
728 UsedInInstr.reset(); 729 for (unsigned i = 0, e = PhysECs.size(); i != e; ++i) { 730 unsigned PhysReg = PhysECs[i]; 731 UsedInInstr.set(PhysReg); 732 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 733 unsigned Alias = *AS; ++AS) 734 UsedInInstr.set(Alias); 735 } 736 737 unsigned DefOpEnd = MI->getNumOperands(); 738 if (TID.isCall()) { 739 // Spill all virtregs before a call. This serves two purposes: 1. If an 740 // exception is thrown, the landing pad is going to expect to find registers 741 // in their spill slots, and 2. we don't have to wade through all the 742 // <imp-def> operands on the call instruction. 743 DefOpEnd = VirtOpEnd; 744 DEBUG(dbgs() << " Spilling remaining registers before call.\n"); 745 spillAll(MI); 746 } 747 748 // Third scan. 749 // Allocate defs and collect dead defs. 750 for (unsigned i = 0; i != DefOpEnd; ++i) { 751 MachineOperand &MO = MI->getOperand(i); 752 if (!MO.isReg() || !MO.isDef() || !MO.getReg()) continue; 753 unsigned Reg = MO.getReg(); 754 755 if (TargetRegisterInfo::isPhysicalRegister(Reg)) { 756 if (!Allocatable.test(Reg)) continue; 757 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 758 regFree : regReserved); 759 continue; 760 } 761 LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc); 762 unsigned PhysReg = LRI->second.PhysReg; 763 if (setPhysReg(MO, PhysReg)) { 764 killVirtReg(LRI); 765 CopyDst = 0; // cancel coalescing; 766 } else 767 CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0; 768 } 769 770 MRI->addPhysRegsUsed(UsedInInstr); 771 772 if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) { 773 DEBUG(dbgs() << "-- coalescing: " << *MI); 774 Coalesced.push_back(MI); 775 } else { 776 DEBUG(dbgs() << "<< " << *MI); 777 } 778 } 779 780 // Spill all physical registers holding virtual registers now. 781 DEBUG(dbgs() << "Spilling live registers at end of block.\n"); 782 spillAll(MBB->getFirstTerminator()); 783 784 // Erase all the coalesced copies. 
We are delaying it until now because 785 // LiveVirtRegs might refer to the instrs. 786 for (unsigned i = 0, e = Coalesced.size(); i != e; ++i) 787 MBB->erase(Coalesced[i]); 788 NumCopies += Coalesced.size(); 789 790 DEBUG(MBB->dump()); 791} 792 793/// runOnMachineFunction - Register allocate the whole function 794/// 795bool RAFast::runOnMachineFunction(MachineFunction &Fn) { 796 DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n" 797 << "********** Function: " 798 << ((Value*)Fn.getFunction())->getName() << '\n'); 799 if (VerifyFastRegalloc) 800 Fn.verify(this, true); 801 MF = &Fn; 802 MRI = &MF->getRegInfo(); 803 TM = &Fn.getTarget(); 804 TRI = TM->getRegisterInfo(); 805 TII = TM->getInstrInfo(); 806 807 UsedInInstr.resize(TRI->getNumRegs()); 808 Allocatable = TRI->getAllocatableSet(*MF); 809 810 // initialize the virtual->physical register map to have a 'null' 811 // mapping for all virtual registers 812 unsigned LastVirtReg = MRI->getLastVirtReg(); 813 StackSlotForVirtReg.grow(LastVirtReg); 814 815 // Loop over all of the basic blocks, eliminating virtual register references 816 for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end(); 817 MBBi != MBBe; ++MBBi) { 818 MBB = &*MBBi; 819 AllocateBasicBlock(); 820 } 821 822 // Make sure the set of used physregs is closed under subreg operations. 823 MRI->closePhysRegsUsed(*TRI); 824 825 StackSlotForVirtReg.clear(); 826 return true; 827} 828 829FunctionPass *llvm::createFastRegisterAllocator() { 830 return new RAFast(); 831} 832