RegAllocFast.cpp revision 5818a3ee4d1a5b4474c67ce70e245237b1edf29b
//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator with the pass registry so it can be selected with
// -regalloc=fast.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  /// RAFast - A local register allocator that processes one basic block at a
  /// time. State (LiveVirtRegs, PhysRegState) is per-block and is spilled at
  /// block boundaries.
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled. -1 (the map's default) means no slot allocated yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    // It is the inverse of PhysRegState for virtreg-valued entries.
    LiveRegMap LiveVirtRegs;

    // LiveDbgValueMap - Maps a virtual register to the most recent DBG_VALUE
    // instruction that refers to it, so the debug info can be retargeted to a
    // stack slot when the register is spilled.
    DenseMap<unsigned, MachineInstr *> LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const TargetInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative costs used by calcSpillCost/allocVirtReg when choosing a
    // physreg to evict.
    enum {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack. Returns the existing frame index if one was
/// already assigned, otherwise creates a new spill slot sized for RC.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
210void RAFast::addKillFlag(const LiveReg &LR) { 211 if (!LR.LastUse) return; 212 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 213 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { 214 if (MO.getReg() == LR.PhysReg) 215 MO.setIsKill(); 216 else 217 LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true); 218 } 219} 220 221/// killVirtReg - Mark virtreg as no longer available. 222void RAFast::killVirtReg(LiveRegMap::iterator LRI) { 223 addKillFlag(LRI->second); 224 const LiveReg &LR = LRI->second; 225 assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping"); 226 PhysRegState[LR.PhysReg] = regFree; 227 // Erase from LiveVirtRegs unless we're spilling in bulk. 228 if (!isBulkSpilling) 229 LiveVirtRegs.erase(LRI); 230} 231 232/// killVirtReg - Mark virtreg as no longer available. 233void RAFast::killVirtReg(unsigned VirtReg) { 234 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 235 "killVirtReg needs a virtual register"); 236 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 237 if (LRI != LiveVirtRegs.end()) 238 killVirtReg(LRI); 239} 240 241/// spillVirtReg - This method spills the value specified by VirtReg into the 242/// corresponding stack slot if needed. 243void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { 244 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 245 "Spilling a physical register is illegal!"); 246 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 247 assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); 248 spillVirtReg(MI, LRI); 249} 250 251/// spillVirtReg - Do the actual work of spilling. 
/// spillVirtReg - Do the actual work of spilling: store the register to its
/// stack slot if it is dirty, retarget any attached DBG_VALUE to the slot,
/// then kill the register.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    // SpillKill also tells the store whether it is the last use of PhysReg.
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    if (MachineInstr *DBG = LiveDbgValueMap.lookup(LRI->first)) {
      // The variable metadata is the last DBG_VALUE operand.
      const MDNode *MDPtr =
        DBG->getOperand(DBG->getNumOperands()-1).getMetadata();
      int64_t Offset = 0;
      if (DBG->getOperand(1).isImm())
        Offset = DBG->getOperand(1).getImm();
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      }
      else
        DL = MI->getDebugLoc();
      if (MachineInstr *NewDV =
          TII->emitFrameIndexDebugValue(*MF, FI, Offset, MDPtr, DL)) {
        // NOTE: this local MBB deliberately shadows the member MBB; the new
        // DBG_VALUE goes into the block containing the old one.
        MachineBasicBlock *MBB = DBG->getParent();
        MBB->insert(MI, NewDV);
        DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
        LiveDbgValueMap[LRI->first] = NewDV;
      }
    }
    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  // Tell spillVirtReg/killVirtReg not to erase map entries one by one; we
  // clear the whole map at the end instead.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    // An alias may hold the value; handled by the alias scan below.
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // Disabled: an alias may still hold a value; fall out to the alias loop.
    break;
  default:
    // A virtreg currently occupies PhysReg - spill it first.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once a superregister is disabled, any remaining aliases are
      // subregisters of it and already covered.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  // Registers already touched by the current instruction cannot be evicted.
  if (UsedInInstr.test(PhysReg))
    return spillImpossible;
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // Need to scan the aliases below.
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up the cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      // A free alias still needs to be disabled: small nonzero cost.
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to "
               << TRI->getName(PhysReg) << "\n");
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
/// Tries the hint first, then any completely free register, then the
/// cheapest-to-evict register. Reports a fatal error if nothing can be freed.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !Allocatable.test(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    switch(calcSpillCost(Hint)) {
    default:
      // Nonzero but possible cost: evict whatever holds the hint first.
      definePhysReg(MI, Hint, regFree);
      // Fall through.
    case 0:
      return assignVirtToPhysReg(LRE, Hint);
    case spillImpossible:
      break;
    }
  }

  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // First try to find a completely free register.
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << "\n");

  // Otherwise pick the register that is cheapest to clear out.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned Cost = calcSpillCost(*I);
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
/// OpNum is the operand index of the def on MI. Returns the LiveVirtRegs
/// entry for VirtReg.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LR.LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this def as the latest touch and mark the value as needing a
  // spill before the physreg can be reused.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
/// Emits a reload from the stack slot when the value is not live; otherwise
/// sanitizes kill/dead flags on the operand at OpNum of MI.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into "
                 << TRI->getName(LR.PhysReg) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this operand as the latest touch of the register.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index: rewrite to the matching subreg of PhysReg
  // (or 0 when no physreg was assigned) and drop the subreg index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  // ThroughRegs are virtregs that must stay live across the instruction:
  // early clobbers, tied operands, and partial redefines.
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << " %reg" << Reg);
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    // PhysRegState holds the virtreg number when allocated, so count()
    // checks whether a through register currently occupies Reg.
    if (ThroughRegs.count(PhysRegState[Reg]))
      definePhysReg(MI, Reg, regFree);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) {
      UsedInInstr.set(*AS);
      if (ThroughRegs.count(PhysRegState[*AS]))
        definePhysReg(MI, *AS, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses and early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->second.PhysReg);
    } else if (MO.isEarlyClobber()) {
      // Note: defineVirtReg may invalidate MO.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg))
        VirtDead.push_back(Reg);
    }
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.reset();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
      UsedInInstr.set(*AS);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    UsedInInstr.set(PartialDefs[i]);
}

/// AllocateBasicBlock - Run the allocator over the current MBB: reserve
/// live-in physregs, then allocate each instruction in order.
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
       E = MBB->livein_end(); I != E; ++I)
    if (Allocatable.test(*I))
      definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
746 while (MII != MBB->end()) { 747 MachineInstr *MI = MII++; 748 const TargetInstrDesc &TID = MI->getDesc(); 749 DEBUG({ 750 dbgs() << "\n>> " << *MI << "Regs:"; 751 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 752 if (PhysRegState[Reg] == regDisabled) continue; 753 dbgs() << " " << TRI->getName(Reg); 754 switch(PhysRegState[Reg]) { 755 case regFree: 756 break; 757 case regReserved: 758 dbgs() << "*"; 759 break; 760 default: 761 dbgs() << "=%reg" << PhysRegState[Reg]; 762 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 763 dbgs() << "*"; 764 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 765 "Bad inverse map"); 766 break; 767 } 768 } 769 dbgs() << '\n'; 770 // Check that LiveVirtRegs is the inverse. 771 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 772 e = LiveVirtRegs.end(); i != e; ++i) { 773 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 774 "Bad map key"); 775 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 776 "Bad map value"); 777 assert(PhysRegState[i->second.PhysReg] == i->first && 778 "Bad inverse map"); 779 } 780 }); 781 782 // Debug values are not allowed to change codegen in any way. 783 if (MI->isDebugValue()) { 784 bool ScanDbgValue = true; 785 while (ScanDbgValue) { 786 ScanDbgValue = false; 787 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 788 MachineOperand &MO = MI->getOperand(i); 789 if (!MO.isReg()) continue; 790 unsigned Reg = MO.getReg(); 791 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 792 LiveDbgValueMap[Reg] = MI; 793 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 794 if (LRI != LiveVirtRegs.end()) 795 setPhysReg(MI, i, LRI->second.PhysReg); 796 else { 797 int SS = StackSlotForVirtReg[Reg]; 798 if (SS == -1) 799 // We can't allocate a physreg for a DebugValue, sorry! 800 MO.setReg(0); 801 else { 802 // Modify DBG_VALUE now that the value is in a spill slot. 
803 int64_t Offset = MI->getOperand(1).getImm(); 804 const MDNode *MDPtr = 805 MI->getOperand(MI->getNumOperands()-1).getMetadata(); 806 DebugLoc DL = MI->getDebugLoc(); 807 if (MachineInstr *NewDV = 808 TII->emitFrameIndexDebugValue(*MF, SS, Offset, MDPtr, DL)) { 809 DEBUG(dbgs() << "Modifying debug info due to spill:" << 810 "\t" << *MI); 811 MachineBasicBlock *MBB = MI->getParent(); 812 MBB->insert(MBB->erase(MI), NewDV); 813 // Scan NewDV operands from the beginning. 814 MI = NewDV; 815 ScanDbgValue = true; 816 break; 817 } else 818 // We can't allocate a physreg for a DebugValue; sorry! 819 MO.setReg(0); 820 } 821 } 822 } 823 } 824 // Next instruction. 825 continue; 826 } 827 828 // If this is a copy, we may be able to coalesce. 829 unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0; 830 if (MI->isCopy()) { 831 CopyDst = MI->getOperand(0).getReg(); 832 CopySrc = MI->getOperand(1).getReg(); 833 CopyDstSub = MI->getOperand(0).getSubReg(); 834 CopySrcSub = MI->getOperand(1).getSubReg(); 835 } 836 837 // Track registers used by instruction. 838 UsedInInstr.reset(); 839 840 // First scan. 841 // Mark physreg uses and early clobbers as used. 
842 // Find the end of the virtreg operands 843 unsigned VirtOpEnd = 0; 844 bool hasTiedOps = false; 845 bool hasEarlyClobbers = false; 846 bool hasPartialRedefs = false; 847 bool hasPhysDefs = false; 848 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 849 MachineOperand &MO = MI->getOperand(i); 850 if (!MO.isReg()) continue; 851 unsigned Reg = MO.getReg(); 852 if (!Reg) continue; 853 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 854 VirtOpEnd = i+1; 855 if (MO.isUse()) { 856 hasTiedOps = hasTiedOps || 857 TID.getOperandConstraint(i, TOI::TIED_TO) != -1; 858 } else { 859 if (MO.isEarlyClobber()) 860 hasEarlyClobbers = true; 861 if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) 862 hasPartialRedefs = true; 863 } 864 continue; 865 } 866 if (!Allocatable.test(Reg)) continue; 867 if (MO.isUse()) { 868 usePhysReg(MO); 869 } else if (MO.isEarlyClobber()) { 870 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 871 regFree : regReserved); 872 hasEarlyClobbers = true; 873 } else 874 hasPhysDefs = true; 875 } 876 877 // The instruction may have virtual register operands that must be allocated 878 // the same register at use-time and def-time: early clobbers and tied 879 // operands. If there are also physical defs, these registers must avoid 880 // both physical defs and uses, making them more constrained than normal 881 // operands. 882 // Similarly, if there are multiple defs and tied operands, we must make 883 // sure the same register is allocated to uses and defs. 884 // We didn't detect inline asm tied operands above, so just make this extra 885 // pass for all inline asm. 886 if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs || 887 (hasTiedOps && (hasPhysDefs || TID.getNumDefs() > 1))) { 888 handleThroughOperands(MI, VirtDead); 889 // Don't attempt coalescing when we have funny stuff going on. 890 CopyDst = 0; 891 // Pretend we have early clobbers so the use operands get marked below. 
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      if (MO.isUse()) {
        // Reload (or locate) the vreg; CopyDst is a hint so a coalescable
        // copy source lands in the copy's destination register.
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->second.PhysReg;
        // Keep CopySrc only while it still matches this operand's register.
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        // setPhysReg returns true when this operand was the last use (kill).
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    MRI->addPhysRegsUsed(UsedInInstr);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.reset();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        // Mark the register and all its aliases so virtreg defs below avoid
        // them.
        UsedInInstr.set(Reg);
        for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
          UsedInInstr.set(*AS);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (TID.isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&TID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Early clobbers were already handled in the scan above.
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!Allocatable.test(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      // CopySrc is a hint so a coalescable copy dest reuses the source's
      // physreg.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        // Dead def: remember it, but don't kill it yet (see below).
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    MRI->addPhysRegsUsed(UsedInInstr);

    // A copy whose source and destination ended up in the same register (with
    // matching subregister indices) is redundant; queue it for deletion.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
/// Caches the target/register/instruction info, sizes the per-instruction
/// and per-virtreg maps, then allocates each basic block independently.
/// Always returns true since register operands are rewritten in place.
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();

  // UsedInInstr is indexed by physical register number; Allocatable is the
  // set of registers the target allows this function to allocate.
  UsedInInstr.resize(TRI->getNumRegs());
  Allocatable = TRI->getAllocatableSet(*MF);

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned LastVirtReg = MRI->getLastVirtReg();
  StackSlotForVirtReg.grow(LastVirtReg);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Make sure the set of used physregs is closed under subreg operations.
  MRI->closePhysRegsUsed(*TRI);

  // Add the clobber lists for all the instructions we skipped earlier.
  // (Call instructions' imp-def operands were not scanned; mark their
  // implicit defs as used now.)
  for (SmallPtrSet<const TargetInstrDesc*, 4>::const_iterator
       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const unsigned *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // Release per-function state so the pass can be reused.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// createFastRegisterAllocator - Factory for the -regalloc=fast pass.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}