RegAllocFast.cpp revision 7569322765651f19eea0609fb082e6b267d5d2b5
//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the name "fast" so -regalloc=fast selects it.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  /// RAFast - A fast, block-local register allocator. Values are kept in
  /// physical registers within a basic block and spilled to stack slots at
  /// block boundaries (and around calls).
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;   // Target machine for the current function.
    MachineFunction *MF;       // Function currently being allocated.
    MachineRegisterInfo *MRI;  // Virtual register descriptors for MF.
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // LiveDbgValueMap - Maps a virtual register to the last DBG_VALUE
    // instruction referencing it, so spills can emit updated debug info.
    DenseMap<unsigned, MachineInstr *> LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const TargetInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative eviction costs used by calcSpillCost/allocVirtReg.
    enum {
      spillClean = 1,        // Holder is clean: no store required.
      spillDirty = 100,      // Holder is dirty: a store must be emitted.
      spillImpossible = ~0u  // Register cannot be freed for allocation.
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot so subsequent spills/reloads of VirtReg reuse it.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
210void RAFast::addKillFlag(const LiveReg &LR) { 211 if (!LR.LastUse) return; 212 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 213 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { 214 if (MO.getReg() == LR.PhysReg) 215 MO.setIsKill(); 216 else 217 LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true); 218 } 219} 220 221/// killVirtReg - Mark virtreg as no longer available. 222void RAFast::killVirtReg(LiveRegMap::iterator LRI) { 223 addKillFlag(LRI->second); 224 const LiveReg &LR = LRI->second; 225 assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping"); 226 PhysRegState[LR.PhysReg] = regFree; 227 // Erase from LiveVirtRegs unless we're spilling in bulk. 228 if (!isBulkSpilling) 229 LiveVirtRegs.erase(LRI); 230} 231 232/// killVirtReg - Mark virtreg as no longer available. 233void RAFast::killVirtReg(unsigned VirtReg) { 234 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 235 "killVirtReg needs a virtual register"); 236 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 237 if (LRI != LiveVirtRegs.end()) 238 killVirtReg(LRI); 239} 240 241/// spillVirtReg - This method spills the value specified by VirtReg into the 242/// corresponding stack slot if needed. If isKill is set, the register is also 243/// killed. 244void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { 245 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 246 "Spilling a physical register is illegal!"); 247 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 248 assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); 249 spillVirtReg(MI, LRI); 250} 251 252/// spillVirtReg - Do the actual work of spilling. 
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    // Emit the actual store before MI; SpillKill marks the physreg killed by
    // the store when the spill point is not the last use.
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    if (MachineInstr *DBG = LiveDbgValueMap.lookup(LRI->first)) {
      // The variable metadata is always the last DBG_VALUE operand.
      const MDNode *MDPtr =
        DBG->getOperand(DBG->getNumOperands()-1).getMetadata();
      int64_t Offset = 0;
      if (DBG->getOperand(1).isImm())
        Offset = DBG->getOperand(1).getImm();
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      }
      else
        DL = MI->getDebugLoc();
      if (MachineInstr *NewDV =
          TII->emitFrameIndexDebugValue(*MF, FI, Offset, MDPtr, DL)) {
        // NOTE(review): this local MBB shadows the member MBB, and NewDV is
        // inserted at MI's position but into DBG's parent block - presumably
        // the same block as the member here; confirm.
        MachineBasicBlock *MBB = DBG->getParent();
        MBB->insert(MI, NewDV);
        DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
        // Remember the new DBG_VALUE so future spills update it instead.
        LiveDbgValueMap[LRI->first] = NewDV;
      }
    }
    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  // Whether or not a store was needed, the virtreg is no longer live here.
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  // Tell killVirtReg not to erase entries; the map is cleared wholesale below.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    // An alias may hold the value; fall through to the alias scan below.
    break;
  case regReserved:
    // A reserved register becomes free once it is used.
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // Aliases may still hold values; handled by the alias scan below.
    break;
  default:
    // The register currently holds a virtreg: spill it before clobbering.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once the covering superregister is disabled, smaller aliases cannot
      // hold live values, so the scan can stop.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling and clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  // A register already claimed by this instruction can never be freed.
  if (UsedInInstr.test(PhysReg))
    return spillImpossible;
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // Fall through to the alias scan below.
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    // Held by a virtreg: dirty holders need a store, clean ones do not.
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up the cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      // Free aliases only need to be moved to regDisabled - a nominal cost.
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to "
               << TRI->getName(PhysReg) << "\n");
  // Maintain the two-way mapping: PhysRegState holds the virtreg number,
  // and the LiveReg entry holds the physreg.
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !Allocatable.test(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    switch(calcSpillCost(Hint)) {
    default:
      // Nonzero, finite cost: evict the current holder, then fall through
      // to take the hint.
      definePhysReg(MI, Hint, regFree);
      // Fall through.
    case 0:
      return assignVirtToPhysReg(LRE, Hint);
    case spillImpossible:
      // Hint cannot be freed; fall back to the normal allocation order.
      break;
    }
  }

  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // First try to find a completely free register.
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << "\n");

  // No free register: pick the cheapest one to evict.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned Cost = calcSpillCost(*I);
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do. Report the failure with as much context as possible.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  // Insert-or-find: New tells us whether VirtReg needs a physreg assigned.
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LR.LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this def as the most recent touch; the new value only exists in
  // the physreg until it is spilled, hence Dirty.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  // Insert-or-find: New tells us whether the value must be reloaded.
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not currently in a register: allocate one and reload from the slot.
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into "
                 << TRI->getName(LR.PhysReg) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      // Last reference to a block-local value: safe to kill/dead it here.
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      // The value lives on (it is dirty and not block-local), so a kill flag
      // here would be wrong.
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this use as the most recent touch of the register.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    // No subregister index: a straight register substitution.
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index: rewrite to the concrete subregister and clear
  // the index (physregs never carry subreg indices).
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  // ThroughRegs - virtregs that are live both into and out of this
  // instruction: early clobbers, tied operands, and partial redefs.
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << " %reg" << Reg);
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    // PhysRegState holds the virtreg number when the physreg is allocated,
    // so this detects a through-register currently living in Reg.
    if (ThroughRegs.count(PhysRegState[Reg]))
      definePhysReg(MI, Reg, regFree);
    // Check every alias of the defined physreg the same way.
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) {
      UsedInInstr.set(*AS);
      if (ThroughRegs.count(PhysRegState[*AS]))
        definePhysReg(MI, *AS, regFree);
    }
  }

  // PartialDefs - physregs holding reloaded partial-redefine values; they
  // must stay marked used after UsedInInstr is reset below.
  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses and early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isUse()) {
      // Only tied uses are handled in this pass.
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->second.PhysReg);
    } else if (MO.isEarlyClobber()) {
      // Note: defineVirtReg may invalidate MO.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg))
        VirtDead.push_back(Reg);
    }
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.reset();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    // Keep only physreg uses and early clobbers marked; plain defs are
    // re-marked during the later def scan.
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
      UsedInInstr.set(*AS);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    UsedInInstr.set(PartialDefs[i]);
}

// AllocateBasicBlock - Allocate registers for every instruction in the
// current basic block (MBB).
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  // All physregs start disabled; live-ins are enabled below.
  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
746 while (MII != MBB->end()) { 747 MachineInstr *MI = MII++; 748 const TargetInstrDesc &TID = MI->getDesc(); 749 DEBUG({ 750 dbgs() << "\n>> " << *MI << "Regs:"; 751 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 752 if (PhysRegState[Reg] == regDisabled) continue; 753 dbgs() << " " << TRI->getName(Reg); 754 switch(PhysRegState[Reg]) { 755 case regFree: 756 break; 757 case regReserved: 758 dbgs() << "*"; 759 break; 760 default: 761 dbgs() << "=%reg" << PhysRegState[Reg]; 762 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 763 dbgs() << "*"; 764 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 765 "Bad inverse map"); 766 break; 767 } 768 } 769 dbgs() << '\n'; 770 // Check that LiveVirtRegs is the inverse. 771 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 772 e = LiveVirtRegs.end(); i != e; ++i) { 773 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 774 "Bad map key"); 775 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 776 "Bad map value"); 777 assert(PhysRegState[i->second.PhysReg] == i->first && 778 "Bad inverse map"); 779 } 780 }); 781 782 // Debug values are not allowed to change codegen in any way. 783 if (MI->isDebugValue()) { 784 bool ScanDbgValue = true; 785 while (ScanDbgValue) { 786 ScanDbgValue = false; 787 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 788 MachineOperand &MO = MI->getOperand(i); 789 if (!MO.isReg()) continue; 790 unsigned Reg = MO.getReg(); 791 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 792 LiveDbgValueMap[Reg] = MI; 793 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 794 if (LRI != LiveVirtRegs.end()) 795 setPhysReg(MI, i, LRI->second.PhysReg); 796 else { 797 int SS = StackSlotForVirtReg[Reg]; 798 if (SS == -1) 799 MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry! 800 else { 801 // Modify DBG_VALUE now that the value is in a spill slot. 
802 int64_t Offset = MI->getOperand(1).getImm(); 803 const MDNode *MDPtr = 804 MI->getOperand(MI->getNumOperands()-1).getMetadata(); 805 DebugLoc DL = MI->getDebugLoc(); 806 if (MachineInstr *NewDV = 807 TII->emitFrameIndexDebugValue(*MF, SS, Offset, MDPtr, DL)) { 808 DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << *MI); 809 MachineBasicBlock *MBB = MI->getParent(); 810 MBB->insert(MBB->erase(MI), NewDV); 811 // Scan NewDV operands from the beginning. 812 MI = NewDV; 813 ScanDbgValue = true; 814 break; 815 } else 816 MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry! 817 } 818 } 819 } 820 } 821 // Next instruction. 822 continue; 823 } 824 825 // If this is a copy, we may be able to coalesce. 826 unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0; 827 if (MI->isCopy()) { 828 CopyDst = MI->getOperand(0).getReg(); 829 CopySrc = MI->getOperand(1).getReg(); 830 CopyDstSub = MI->getOperand(0).getSubReg(); 831 CopySrcSub = MI->getOperand(1).getSubReg(); 832 } 833 834 // Track registers used by instruction. 835 UsedInInstr.reset(); 836 837 // First scan. 838 // Mark physreg uses and early clobbers as used. 
839 // Find the end of the virtreg operands 840 unsigned VirtOpEnd = 0; 841 bool hasTiedOps = false; 842 bool hasEarlyClobbers = false; 843 bool hasPartialRedefs = false; 844 bool hasPhysDefs = false; 845 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 846 MachineOperand &MO = MI->getOperand(i); 847 if (!MO.isReg()) continue; 848 unsigned Reg = MO.getReg(); 849 if (!Reg) continue; 850 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 851 VirtOpEnd = i+1; 852 if (MO.isUse()) { 853 hasTiedOps = hasTiedOps || 854 TID.getOperandConstraint(i, TOI::TIED_TO) != -1; 855 } else { 856 if (MO.isEarlyClobber()) 857 hasEarlyClobbers = true; 858 if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) 859 hasPartialRedefs = true; 860 } 861 continue; 862 } 863 if (!Allocatable.test(Reg)) continue; 864 if (MO.isUse()) { 865 usePhysReg(MO); 866 } else if (MO.isEarlyClobber()) { 867 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 868 regFree : regReserved); 869 hasEarlyClobbers = true; 870 } else 871 hasPhysDefs = true; 872 } 873 874 // The instruction may have virtual register operands that must be allocated 875 // the same register at use-time and def-time: early clobbers and tied 876 // operands. If there are also physical defs, these registers must avoid 877 // both physical defs and uses, making them more constrained than normal 878 // operands. 879 // Similarly, if there are multiple defs and tied operands, we must make sure 880 // the same register is allocated to uses and defs. 881 // We didn't detect inline asm tied operands above, so just make this extra 882 // pass for all inline asm. 883 if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs || 884 (hasTiedOps && (hasPhysDefs || TID.getNumDefs() > 1))) { 885 handleThroughOperands(MI, VirtDead); 886 // Don't attempt coalescing when we have funny stuff going on. 887 CopyDst = 0; 888 // Pretend we have early clobbers so the use operands get marked below. 
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      if (MO.isUse()) {
        // Make sure the virtreg value is available in a physreg, reloading
        // it if necessary.  CopyDst serves as an allocation hint so a copy's
        // source may land in the destination's register.
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->second.PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        // A true return from setPhysReg marks this as the last use, so the
        // virtreg can be freed immediately.
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    MRI->addPhysRegsUsed(UsedInInstr);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.reset();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        // Mark the register and everything aliasing it as used.
        UsedInInstr.set(Reg);
        for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
          UsedInInstr.set(*AS);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (TID.isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&TID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Early clobbers were already given a register in the first scan.
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!Allocatable.test(Reg)) continue;
        // Implicit or dead physreg defs leave the register free; otherwise
        // it stays reserved for this instruction.
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      // Assign a physreg to the virtual register def.  CopySrc serves as an
      // allocation hint for coalescing.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        // Dead def: remember it, but kill only after the whole scan.
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing;
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    MRI->addPhysRegsUsed(UsedInInstr);

    // If both ends of the copy were assigned the same physreg (including the
    // same subregister indices), the copy is a no-op; queue it for deletion.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function.  Processes
/// one basic block at a time, rewriting virtual register operands to
/// physical registers.  Always returns true because the function is
/// modified.
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  // Cache the per-function state and target hooks used by the allocator.
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();

  UsedInInstr.resize(TRI->getNumRegs());
  Allocatable = TRI->getAllocatableSet(*MF);

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned LastVirtReg = MRI->getLastVirtReg();
  StackSlotForVirtReg.grow(LastVirtReg);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Make sure the set of used physregs is closed under subreg operations.
  MRI->closePhysRegsUsed(*TRI);

  // Add the clobber lists for all the instructions we skipped earlier.
  // These were inserted for call instructions whose <imp-def> operands were
  // not scanned; their implicit defs must still count as used physregs.
  for (SmallPtrSet<const TargetInstrDesc*, 4>::const_iterator
       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const unsigned *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // Release the per-function state before the next run.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// createFastRegisterAllocator - Factory for the "-regalloc=fast" allocator
/// registered at the top of this file.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}