RegAllocFast.cpp revision 46a9fd6231d2753aedb3248402b9db36d7bc515f
//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the name "fast" so -regalloc=fast selects it.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  /// RAFast - A fast, block-local register allocator. It walks each basic
  /// block once, keeping values in physical registers as long as possible and
  /// spilling everything that is still live at the end of the block.
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(&ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled. A value of -1 (the map's default) means no slot has
    // been allocated yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative spill costs used by calcSpillCost/allocVirtReg.
    enum {
      spillClean = 1,       // Register is clean; no store needed.
      spillDirty = 100,     // Register must be stored to its stack slot.
      spillImpossible = ~0u // Register cannot be freed at all.
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack. Returns the existing frame index when one has
/// already been assigned; otherwise creates a new spill slot and records it.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      // The operand was already rewritten to a different register; add an
      // implicit kill of the physreg instead.
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(LRI->second);
  const LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");
  PhysRegState[LR.PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk. During bulk
  // spilling (spillAll) the whole map is cleared afterwards, and erasing here
  // would invalidate the caller's iteration.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed. The virtual register is then killed,
/// i.e. marked as no longer live in a physical register.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling. The spill store is inserted
/// before MI.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    // A virtreg currently lives here - evict it first.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once the (only possible) super-register is disabled, all remaining
      // aliases must already be disabled.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  // Registers already used by this instruction can never be freed.
  if (UsedInInstr.test(PhysReg))
    return spillImpossible;
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    // Occupied by a virtreg: dirty registers need a store, clean ones don't.
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up the cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to "
               << TRI->getName(PhysReg) << "\n");
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg, preferring Hint
/// when it is valid and cheap enough. Reports a fatal error when no register
/// in the class can be freed.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !Allocatable.test(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    switch(calcSpillCost(Hint)) {
    default:
      // The hint can be freed by spilling - do so, then take it.
      definePhysReg(MI, Hint, regFree);
      // Fall through.
    case 0:
      return assignVirtToPhysReg(LRE, Hint);
    case spillImpossible:
      break;
    }
  }

  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // First try to find a completely free register.
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << "\n");

  // No free register: pick the cheapest register to evict.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned Cost = calcSpillCost(*I);
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  // A def with a subreg index only redefines part of the register.
  bool PartialRedef = MI->getOperand(OpNum).getSubReg();
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      // It's a copy, use the destination register as a hint.
      if (TII->isMoveInstr(*MRI->use_nodbg_begin(VirtReg),
                           SrcReg, DstReg, SrcSubReg, DstSubReg))
        Hint = DstReg;
    }
    allocVirtReg(MI, *LRI, Hint);
    // If this is only a partial redefinition, we must reload the other parts.
    if (PartialRedef && MI->readsVirtualRegister(VirtReg)) {
      const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
      int FI = getStackSpaceFor(VirtReg, RC);
      DEBUG(dbgs() << "Reloading for partial redef: %reg" << VirtReg << "\n");
      TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FI, RC, TRI);
      ++NumLoads;
    }
  } else if (LR.LastUse && !PartialRedef) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not currently in a register - allocate one and emit a reload before MI.
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into "
                 << TRI->getName(LR.PhysReg) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      MO.setIsKill();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

/// AllocateBasicBlock - Allocate registers for every instruction in the
/// current basic block (MBB), spilling everything still live at the end.
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> PhysECs, VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const TargetInstrDesc &TID = MI->getDesc();
    DEBUG({
      // Dump the current register state and verify the PhysRegState /
      // LiveVirtRegs inverse mapping (debug builds only).
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch(PhysRegState[Reg]) {
        case regFree:
          break;
        case regReserved:
          dbgs() << "*";
          break;
        default:
          dbgs() << "=%reg" << PhysRegState[Reg];
          if (LiveVirtRegs[PhysRegState[Reg]].Dirty)
            dbgs() << "*";
          assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg &&
                 "Bad inverse map");
          break;
        }
      }
      dbgs() << '\n';
      // Check that LiveVirtRegs is the inverse.
      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(TargetRegisterInfo::isVirtualRegister(i->first) &&
               "Bad map key");
        assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) &&
               "Bad map value");
        assert(PhysRegState[i->second.PhysReg] == i->first &&
               "Bad inverse map");
      }
    });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg);
        if (LRI != LiveVirtRegs.end())
          setPhysReg(MI, i, LRI->second.PhysReg);
        else
          MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry!
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc, CopyDst, CopySrcSub, CopyDstSub;
    if (!TII->isMoveInstr(*MI, CopySrc, CopyDst, CopySrcSub, CopyDstSub))
      CopySrc = CopyDst = 0;

    // Track registers used by instruction.
    UsedInInstr.reset();
    PhysECs.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands
    unsigned VirtOpEnd = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        continue;
      }
      if (!Allocatable.test(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
        PhysECs.push_back(Reg);
      }
    }

    // Second scan.
    // Allocate virtreg uses and early clobbers.
    // Collect VirtKills
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->second.PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      } else if (MO.isEarlyClobber()) {
        // Note: defineVirtReg may invalidate MO.
        LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
        unsigned PhysReg = LRI->second.PhysReg;
        setPhysReg(MI, i, PhysReg);
        PhysECs.push_back(PhysReg);
      }
    }

    MRI->addPhysRegsUsed(UsedInInstr);

    // Track registers defined by instruction - early clobbers at this point.
    UsedInInstr.reset();
    for (unsigned i = 0, e = PhysECs.size(); i != e; ++i) {
      unsigned PhysReg = PhysECs[i];
      UsedInInstr.set(PhysReg);
      for (const unsigned *AS = TRI->getAliasSet(PhysReg);
           unsigned Alias = *AS; ++AS)
        UsedInInstr.set(Alias);
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (TID.isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find registers
      // in their spill slots, and 2. we don't have to wade through all the
      // <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << " Spilling remaining registers before call.\n");
      spillAll(MI);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg()) continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!Allocatable.test(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing;
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    MRI->addPhysRegsUsed(UsedInInstr);

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      // Source and destination ended up in the same physreg - the copy is a
      // no-op; remember it for removal once LiveVirtRegs no longer needs it.
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();

  UsedInInstr.resize(TRI->getNumRegs());
  Allocatable = TRI->getAllocatableSet(*MF);

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned LastVirtReg = MRI->getLastVirtReg();
  StackSlotForVirtReg.grow(LastVirtReg);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Make sure the set of used physregs is closed under subreg operations.
  MRI->closePhysRegsUsed(*TRI);

  StackSlotForVirtReg.clear();
  return true;
}

/// createFastRegisterAllocator - Factory used by the RegisterRegAlloc
/// registration above and by clients selecting this allocator directly.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}