// AggressiveAntiDepBreaker.cpp — LLVM source at revision
// e29e8e100ea38be1771e5f010a5511cbb990d515
1//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements the AggressiveAntiDepBreaker class, which 11// implements register anti-dependence breaking during post-RA 12// scheduling. It attempts to break all anti-dependencies within a 13// block. 14// 15//===----------------------------------------------------------------------===// 16 17#define DEBUG_TYPE "post-RA-sched" 18#include "AggressiveAntiDepBreaker.h" 19#include "llvm/CodeGen/MachineBasicBlock.h" 20#include "llvm/CodeGen/MachineFrameInfo.h" 21#include "llvm/CodeGen/MachineInstr.h" 22#include "llvm/Target/TargetInstrInfo.h" 23#include "llvm/Target/TargetMachine.h" 24#include "llvm/Target/TargetInstrInfo.h" 25#include "llvm/Target/TargetRegisterInfo.h" 26#include "llvm/Support/CommandLine.h" 27#include "llvm/Support/Debug.h" 28#include "llvm/Support/ErrorHandling.h" 29#include "llvm/Support/raw_ostream.h" 30using namespace llvm; 31 32// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod 33static cl::opt<int> 34DebugDiv("agg-antidep-debugdiv", 35 cl::desc("Debug control for aggressive anti-dep breaker"), 36 cl::init(0), cl::Hidden); 37static cl::opt<int> 38DebugMod("agg-antidep-debugmod", 39 cl::desc("Debug control for aggressive anti-dep breaker"), 40 cl::init(0), cl::Hidden); 41 42AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs, 43 MachineBasicBlock *BB) : 44 NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0), 45 GroupNodeIndices(TargetRegs, 0), 46 KillIndices(TargetRegs, 0), 47 DefIndices(TargetRegs, 0) 48{ 49 const unsigned BBSize = BB->size(); 50 for (unsigned i = 0; i < NumTargetRegs; ++i) { 51 // Initialize all registers to be in their own group. 
Initially we 52 // assign the register to the same-indexed GroupNode. 53 GroupNodeIndices[i] = i; 54 // Initialize the indices to indicate that no registers are live. 55 KillIndices[i] = ~0u; 56 DefIndices[i] = BBSize; 57 } 58} 59 60unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) { 61 unsigned Node = GroupNodeIndices[Reg]; 62 while (GroupNodes[Node] != Node) 63 Node = GroupNodes[Node]; 64 65 return Node; 66} 67 68void AggressiveAntiDepState::GetGroupRegs( 69 unsigned Group, 70 std::vector<unsigned> &Regs, 71 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs) 72{ 73 for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) { 74 if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0)) 75 Regs.push_back(Reg); 76 } 77} 78 79unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2) 80{ 81 assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!"); 82 assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!"); 83 84 // find group for each register 85 unsigned Group1 = GetGroup(Reg1); 86 unsigned Group2 = GetGroup(Reg2); 87 88 // if either group is 0, then that must become the parent 89 unsigned Parent = (Group1 == 0) ? Group1 : Group2; 90 unsigned Other = (Parent == Group1) ? Group2 : Group1; 91 GroupNodes.at(Other) = Parent; 92 return Parent; 93} 94 95unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg) 96{ 97 // Create a new GroupNode for Reg. Reg's existing GroupNode must 98 // stay as is because there could be other GroupNodes referring to 99 // it. 100 unsigned idx = GroupNodes.size(); 101 GroupNodes.push_back(idx); 102 GroupNodeIndices[Reg] = idx; 103 return idx; 104} 105 106bool AggressiveAntiDepState::IsLive(unsigned Reg) 107{ 108 // KillIndex must be defined and DefIndex not defined for a register 109 // to be live. 
110 return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u)); 111} 112 113 114 115AggressiveAntiDepBreaker:: 116AggressiveAntiDepBreaker(MachineFunction& MFi, 117 TargetSubtarget::RegClassVector& CriticalPathRCs) : 118 AntiDepBreaker(), MF(MFi), 119 MRI(MF.getRegInfo()), 120 TII(MF.getTarget().getInstrInfo()), 121 TRI(MF.getTarget().getRegisterInfo()), 122 AllocatableSet(TRI->getAllocatableSet(MF)), 123 State(NULL) { 124 /* Collect a bitset of all registers that are only broken if they 125 are on the critical path. */ 126 for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) { 127 BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]); 128 if (CriticalPathSet.none()) 129 CriticalPathSet = CPSet; 130 else 131 CriticalPathSet |= CPSet; 132 } 133 134 DEBUG(dbgs() << "AntiDep Critical-Path Registers:"); 135 DEBUG(for (int r = CriticalPathSet.find_first(); r != -1; 136 r = CriticalPathSet.find_next(r)) 137 dbgs() << " " << TRI->getName(r)); 138 DEBUG(dbgs() << '\n'); 139} 140 141AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() { 142 delete State; 143} 144 145void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) { 146 assert(State == NULL); 147 State = new AggressiveAntiDepState(TRI->getNumRegs(), BB); 148 149 bool IsReturnBlock = (!BB->empty() && BB->back().getDesc().isReturn()); 150 std::vector<unsigned> &KillIndices = State->GetKillIndices(); 151 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 152 153 // Determine the live-out physregs for this block. 154 if (IsReturnBlock) { 155 // In a return block, examine the function live-out regs. 
156 for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(), 157 E = MRI.liveout_end(); I != E; ++I) { 158 for (const unsigned *Alias = TRI->getOverlaps(*I); 159 unsigned Reg = *Alias; ++Alias) { 160 State->UnionGroups(Reg, 0); 161 KillIndices[Reg] = BB->size(); 162 DefIndices[Reg] = ~0u; 163 } 164 } 165 } 166 167 // In a non-return block, examine the live-in regs of all successors. 168 // Note a return block can have successors if the return instruction is 169 // predicated. 170 for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(), 171 SE = BB->succ_end(); SI != SE; ++SI) 172 for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(), 173 E = (*SI)->livein_end(); I != E; ++I) { 174 for (const unsigned *Alias = TRI->getOverlaps(*I); 175 unsigned Reg = *Alias; ++Alias) { 176 State->UnionGroups(Reg, 0); 177 KillIndices[Reg] = BB->size(); 178 DefIndices[Reg] = ~0u; 179 } 180 } 181 182 // Mark live-out callee-saved registers. In a return block this is 183 // all callee-saved registers. In non-return this is any 184 // callee-saved register that is not saved in the prolog. 
185 const MachineFrameInfo *MFI = MF.getFrameInfo(); 186 BitVector Pristine = MFI->getPristineRegs(BB); 187 for (const unsigned *I = TRI->getCalleeSavedRegs(); *I; ++I) { 188 unsigned Reg = *I; 189 if (!IsReturnBlock && !Pristine.test(Reg)) continue; 190 for (const unsigned *Alias = TRI->getOverlaps(Reg); 191 unsigned AliasReg = *Alias; ++Alias) { 192 State->UnionGroups(AliasReg, 0); 193 KillIndices[AliasReg] = BB->size(); 194 DefIndices[AliasReg] = ~0u; 195 } 196 } 197} 198 199void AggressiveAntiDepBreaker::FinishBlock() { 200 delete State; 201 State = NULL; 202} 203 204void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count, 205 unsigned InsertPosIndex) { 206 assert(Count < InsertPosIndex && "Instruction index out of expected range!"); 207 208 std::set<unsigned> PassthruRegs; 209 GetPassthruRegs(MI, PassthruRegs); 210 PrescanInstruction(MI, Count, PassthruRegs); 211 ScanInstruction(MI, Count); 212 213 DEBUG(dbgs() << "Observe: "); 214 DEBUG(MI->dump()); 215 DEBUG(dbgs() << "\tRegs:"); 216 217 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 218 for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) { 219 // If Reg is current live, then mark that it can't be renamed as 220 // we don't know the extent of its live-range anymore (now that it 221 // has been scheduled). If it is not live but was defined in the 222 // previous schedule region, then set its def index to the most 223 // conservative location (i.e. the beginning of the previous 224 // schedule region). 
225 if (State->IsLive(Reg)) { 226 DEBUG(if (State->GetGroup(Reg) != 0) 227 dbgs() << " " << TRI->getName(Reg) << "=g" << 228 State->GetGroup(Reg) << "->g0(region live-out)"); 229 State->UnionGroups(Reg, 0); 230 } else if ((DefIndices[Reg] < InsertPosIndex) 231 && (DefIndices[Reg] >= Count)) { 232 DefIndices[Reg] = Count; 233 } 234 } 235 DEBUG(dbgs() << '\n'); 236} 237 238bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI, 239 MachineOperand& MO) 240{ 241 if (!MO.isReg() || !MO.isImplicit()) 242 return false; 243 244 unsigned Reg = MO.getReg(); 245 if (Reg == 0) 246 return false; 247 248 MachineOperand *Op = NULL; 249 if (MO.isDef()) 250 Op = MI->findRegisterUseOperand(Reg, true); 251 else 252 Op = MI->findRegisterDefOperand(Reg); 253 254 return((Op != NULL) && Op->isImplicit()); 255} 256 257void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI, 258 std::set<unsigned>& PassthruRegs) { 259 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 260 MachineOperand &MO = MI->getOperand(i); 261 if (!MO.isReg()) continue; 262 if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) || 263 IsImplicitDefUse(MI, MO)) { 264 const unsigned Reg = MO.getReg(); 265 PassthruRegs.insert(Reg); 266 for (const unsigned *Subreg = TRI->getSubRegisters(Reg); 267 *Subreg; ++Subreg) { 268 PassthruRegs.insert(*Subreg); 269 } 270 } 271 } 272} 273 274/// AntiDepEdges - Return in Edges the anti- and output- dependencies 275/// in SU that we want to consider for breaking. 
276static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) { 277 SmallSet<unsigned, 4> RegSet; 278 for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end(); 279 P != PE; ++P) { 280 if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) { 281 unsigned Reg = P->getReg(); 282 if (RegSet.count(Reg) == 0) { 283 Edges.push_back(&*P); 284 RegSet.insert(Reg); 285 } 286 } 287 } 288} 289 290/// CriticalPathStep - Return the next SUnit after SU on the bottom-up 291/// critical path. 292static const SUnit *CriticalPathStep(const SUnit *SU) { 293 const SDep *Next = 0; 294 unsigned NextDepth = 0; 295 // Find the predecessor edge with the greatest depth. 296 if (SU != 0) { 297 for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end(); 298 P != PE; ++P) { 299 const SUnit *PredSU = P->getSUnit(); 300 unsigned PredLatency = P->getLatency(); 301 unsigned PredTotalLatency = PredSU->getDepth() + PredLatency; 302 // In the case of a latency tie, prefer an anti-dependency edge over 303 // other types of edges. 304 if (NextDepth < PredTotalLatency || 305 (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) { 306 NextDepth = PredTotalLatency; 307 Next = &*P; 308 } 309 } 310 } 311 312 return (Next) ? 
Next->getSUnit() : 0; 313} 314 315void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx, 316 const char *tag, 317 const char *header, 318 const char *footer) { 319 std::vector<unsigned> &KillIndices = State->GetKillIndices(); 320 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 321 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>& 322 RegRefs = State->GetRegRefs(); 323 324 if (!State->IsLive(Reg)) { 325 KillIndices[Reg] = KillIdx; 326 DefIndices[Reg] = ~0u; 327 RegRefs.erase(Reg); 328 State->LeaveGroup(Reg); 329 DEBUG(if (header != NULL) { 330 dbgs() << header << TRI->getName(Reg); header = NULL; }); 331 DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag); 332 } 333 // Repeat for subregisters. 334 for (const unsigned *Subreg = TRI->getSubRegisters(Reg); 335 *Subreg; ++Subreg) { 336 unsigned SubregReg = *Subreg; 337 if (!State->IsLive(SubregReg)) { 338 KillIndices[SubregReg] = KillIdx; 339 DefIndices[SubregReg] = ~0u; 340 RegRefs.erase(SubregReg); 341 State->LeaveGroup(SubregReg); 342 DEBUG(if (header != NULL) { 343 dbgs() << header << TRI->getName(Reg); header = NULL; }); 344 DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" << 345 State->GetGroup(SubregReg) << tag); 346 } 347 } 348 349 DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer); 350} 351 352void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI, 353 unsigned Count, 354 std::set<unsigned>& PassthruRegs) { 355 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 356 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>& 357 RegRefs = State->GetRegRefs(); 358 359 // Handle dead defs by simulating a last-use of the register just 360 // after the def. A dead def can occur because the def is truly 361 // dead, or because only a subregister is live at the def. If we 362 // don't do this the dead def will be incorrectly merged into the 363 // previous def. 
364 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 365 MachineOperand &MO = MI->getOperand(i); 366 if (!MO.isReg() || !MO.isDef()) continue; 367 unsigned Reg = MO.getReg(); 368 if (Reg == 0) continue; 369 370 HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n"); 371 } 372 373 DEBUG(dbgs() << "\tDef Groups:"); 374 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 375 MachineOperand &MO = MI->getOperand(i); 376 if (!MO.isReg() || !MO.isDef()) continue; 377 unsigned Reg = MO.getReg(); 378 if (Reg == 0) continue; 379 380 DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg)); 381 382 // If MI's defs have a special allocation requirement, don't allow 383 // any def registers to be changed. Also assume all registers 384 // defined in a call must not be changed (ABI). 385 if (MI->getDesc().isCall() || MI->getDesc().hasExtraDefRegAllocReq() || 386 TII->isPredicated(MI)) { 387 DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)"); 388 State->UnionGroups(Reg, 0); 389 } 390 391 // Any aliased that are live at this point are completely or 392 // partially defined here, so group those aliases with Reg. 393 for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) { 394 unsigned AliasReg = *Alias; 395 if (State->IsLive(AliasReg)) { 396 State->UnionGroups(Reg, AliasReg); 397 DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " << 398 TRI->getName(AliasReg) << ")"); 399 } 400 } 401 402 // Note register reference... 403 const TargetRegisterClass *RC = NULL; 404 if (i < MI->getDesc().getNumOperands()) 405 RC = MI->getDesc().OpInfo[i].getRegClass(TRI); 406 AggressiveAntiDepState::RegisterReference RR = { &MO, RC }; 407 RegRefs.insert(std::make_pair(Reg, RR)); 408 } 409 410 DEBUG(dbgs() << '\n'); 411 412 // Scan the register defs for this instruction and update 413 // live-ranges. 
414 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 415 MachineOperand &MO = MI->getOperand(i); 416 if (!MO.isReg() || !MO.isDef()) continue; 417 unsigned Reg = MO.getReg(); 418 if (Reg == 0) continue; 419 // Ignore KILLs and passthru registers for liveness... 420 if (MI->isKill() || (PassthruRegs.count(Reg) != 0)) 421 continue; 422 423 // Update def for Reg and aliases. 424 for (const unsigned *Alias = TRI->getOverlaps(Reg); 425 unsigned AliasReg = *Alias; ++Alias) 426 DefIndices[AliasReg] = Count; 427 } 428} 429 430void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI, 431 unsigned Count) { 432 DEBUG(dbgs() << "\tUse Groups:"); 433 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>& 434 RegRefs = State->GetRegRefs(); 435 436 // If MI's uses have special allocation requirement, don't allow 437 // any use registers to be changed. Also assume all registers 438 // used in a call must not be changed (ABI). 439 // FIXME: The issue with predicated instruction is more complex. We are being 440 // conservatively here because the kill markers cannot be trusted after 441 // if-conversion: 442 // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14] 443 // ... 444 // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395] 445 // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12] 446 // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8) 447 // 448 // The first R6 kill is not really a kill since it's killed by a predicated 449 // instruction which may not be executed. The second R6 def may or may not 450 // re-define R6 so it's not safe to change it since the last R6 use cannot be 451 // changed. 452 bool Special = MI->getDesc().isCall() || 453 MI->getDesc().hasExtraSrcRegAllocReq() || 454 TII->isPredicated(MI); 455 456 // Scan the register uses for this instruction and update 457 // live-ranges, groups and RegRefs. 
458 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 459 MachineOperand &MO = MI->getOperand(i); 460 if (!MO.isReg() || !MO.isUse()) continue; 461 unsigned Reg = MO.getReg(); 462 if (Reg == 0) continue; 463 464 DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << 465 State->GetGroup(Reg)); 466 467 // It wasn't previously live but now it is, this is a kill. Forget 468 // the previous live-range information and start a new live-range 469 // for the register. 470 HandleLastUse(Reg, Count, "(last-use)"); 471 472 if (Special) { 473 DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)"); 474 State->UnionGroups(Reg, 0); 475 } 476 477 // Note register reference... 478 const TargetRegisterClass *RC = NULL; 479 if (i < MI->getDesc().getNumOperands()) 480 RC = MI->getDesc().OpInfo[i].getRegClass(TRI); 481 AggressiveAntiDepState::RegisterReference RR = { &MO, RC }; 482 RegRefs.insert(std::make_pair(Reg, RR)); 483 } 484 485 DEBUG(dbgs() << '\n'); 486 487 // Form a group of all defs and uses of a KILL instruction to ensure 488 // that all registers are renamed as a group. 489 if (MI->isKill()) { 490 DEBUG(dbgs() << "\tKill Group:"); 491 492 unsigned FirstReg = 0; 493 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 494 MachineOperand &MO = MI->getOperand(i); 495 if (!MO.isReg()) continue; 496 unsigned Reg = MO.getReg(); 497 if (Reg == 0) continue; 498 499 if (FirstReg != 0) { 500 DEBUG(dbgs() << "=" << TRI->getName(Reg)); 501 State->UnionGroups(FirstReg, Reg); 502 } else { 503 DEBUG(dbgs() << " " << TRI->getName(Reg)); 504 FirstReg = Reg; 505 } 506 } 507 508 DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n'); 509 } 510} 511 512BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) { 513 BitVector BV(TRI->getNumRegs(), false); 514 bool first = true; 515 516 // Check all references that need rewriting for Reg. 
For each, use 517 // the corresponding register class to narrow the set of registers 518 // that are appropriate for renaming. 519 std::pair<std::multimap<unsigned, 520 AggressiveAntiDepState::RegisterReference>::iterator, 521 std::multimap<unsigned, 522 AggressiveAntiDepState::RegisterReference>::iterator> 523 Range = State->GetRegRefs().equal_range(Reg); 524 for (std::multimap<unsigned, 525 AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first, 526 QE = Range.second; Q != QE; ++Q) { 527 const TargetRegisterClass *RC = Q->second.RC; 528 if (RC == NULL) continue; 529 530 BitVector RCBV = TRI->getAllocatableSet(MF, RC); 531 if (first) { 532 BV |= RCBV; 533 first = false; 534 } else { 535 BV &= RCBV; 536 } 537 538 DEBUG(dbgs() << " " << RC->getName()); 539 } 540 541 return BV; 542} 543 544bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters( 545 unsigned AntiDepGroupIndex, 546 RenameOrderType& RenameOrder, 547 std::map<unsigned, unsigned> &RenameMap) { 548 std::vector<unsigned> &KillIndices = State->GetKillIndices(); 549 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 550 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>& 551 RegRefs = State->GetRegRefs(); 552 553 // Collect all referenced registers in the same group as 554 // AntiDepReg. These all need to be renamed together if we are to 555 // break the anti-dependence. 556 std::vector<unsigned> Regs; 557 State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs); 558 assert(Regs.size() > 0 && "Empty register group!"); 559 if (Regs.size() == 0) 560 return false; 561 562 // Find the "superest" register in the group. At the same time, 563 // collect the BitVector of registers that can be used to rename 564 // each register. 
565 DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex 566 << ":\n"); 567 std::map<unsigned, BitVector> RenameRegisterMap; 568 unsigned SuperReg = 0; 569 for (unsigned i = 0, e = Regs.size(); i != e; ++i) { 570 unsigned Reg = Regs[i]; 571 if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg)) 572 SuperReg = Reg; 573 574 // If Reg has any references, then collect possible rename regs 575 if (RegRefs.count(Reg) > 0) { 576 DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":"); 577 578 BitVector BV = GetRenameRegisters(Reg); 579 RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV)); 580 581 DEBUG(dbgs() << " ::"); 582 DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r)) 583 dbgs() << " " << TRI->getName(r)); 584 DEBUG(dbgs() << "\n"); 585 } 586 } 587 588 // All group registers should be a subreg of SuperReg. 589 for (unsigned i = 0, e = Regs.size(); i != e; ++i) { 590 unsigned Reg = Regs[i]; 591 if (Reg == SuperReg) continue; 592 bool IsSub = TRI->isSubRegister(SuperReg, Reg); 593 assert(IsSub && "Expecting group subregister"); 594 if (!IsSub) 595 return false; 596 } 597 598#ifndef NDEBUG 599 // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod 600 if (DebugDiv > 0) { 601 static int renamecnt = 0; 602 if (renamecnt++ % DebugDiv != DebugMod) 603 return false; 604 605 dbgs() << "*** Performing rename " << TRI->getName(SuperReg) << 606 " for debug ***\n"; 607 } 608#endif 609 610 // Check each possible rename register for SuperReg in round-robin 611 // order. If that register is available, and the corresponding 612 // registers are available for the other group subregisters, then we 613 // can use those registers to rename. 614 615 // FIXME: Using getMinimalPhysRegClass is very conservative. We should 616 // check every use of the register and find the largest register class 617 // that can be used in all of them. 
618 const TargetRegisterClass *SuperRC = 619 TRI->getMinimalPhysRegClass(SuperReg, MVT::Other); 620 621 const TargetRegisterClass::iterator RB = SuperRC->allocation_order_begin(MF); 622 const TargetRegisterClass::iterator RE = SuperRC->allocation_order_end(MF); 623 if (RB == RE) { 624 DEBUG(dbgs() << "\tEmpty Super Regclass!!\n"); 625 return false; 626 } 627 628 DEBUG(dbgs() << "\tFind Registers:"); 629 630 if (RenameOrder.count(SuperRC) == 0) 631 RenameOrder.insert(RenameOrderType::value_type(SuperRC, RE)); 632 633 const TargetRegisterClass::iterator OrigR = RenameOrder[SuperRC]; 634 const TargetRegisterClass::iterator EndR = ((OrigR == RE) ? RB : OrigR); 635 TargetRegisterClass::iterator R = OrigR; 636 do { 637 if (R == RB) R = RE; 638 --R; 639 const unsigned NewSuperReg = *R; 640 // Don't consider non-allocatable registers 641 if (!AllocatableSet.test(NewSuperReg)) continue; 642 // Don't replace a register with itself. 643 if (NewSuperReg == SuperReg) continue; 644 645 DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':'); 646 RenameMap.clear(); 647 648 // For each referenced group register (which must be a SuperReg or 649 // a subregister of SuperReg), find the corresponding subregister 650 // of NewSuperReg and make sure it is free to be renamed. 651 for (unsigned i = 0, e = Regs.size(); i != e; ++i) { 652 unsigned Reg = Regs[i]; 653 unsigned NewReg = 0; 654 if (Reg == SuperReg) { 655 NewReg = NewSuperReg; 656 } else { 657 unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg); 658 if (NewSubRegIdx != 0) 659 NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx); 660 } 661 662 DEBUG(dbgs() << " " << TRI->getName(NewReg)); 663 664 // Check if Reg can be renamed to NewReg. 665 BitVector BV = RenameRegisterMap[Reg]; 666 if (!BV.test(NewReg)) { 667 DEBUG(dbgs() << "(no rename)"); 668 goto next_super_reg; 669 } 670 671 // If NewReg is dead and NewReg's most recent def is not before 672 // Regs's kill, it's safe to replace Reg with NewReg. 
We 673 // must also check all aliases of NewReg, because we can't define a 674 // register when any sub or super is already live. 675 if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) { 676 DEBUG(dbgs() << "(live)"); 677 goto next_super_reg; 678 } else { 679 bool found = false; 680 for (const unsigned *Alias = TRI->getAliasSet(NewReg); 681 *Alias; ++Alias) { 682 unsigned AliasReg = *Alias; 683 if (State->IsLive(AliasReg) || 684 (KillIndices[Reg] > DefIndices[AliasReg])) { 685 DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)"); 686 found = true; 687 break; 688 } 689 } 690 if (found) 691 goto next_super_reg; 692 } 693 694 // Record that 'Reg' can be renamed to 'NewReg'. 695 RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg)); 696 } 697 698 // If we fall-out here, then every register in the group can be 699 // renamed, as recorded in RenameMap. 700 RenameOrder.erase(SuperRC); 701 RenameOrder.insert(RenameOrderType::value_type(SuperRC, R)); 702 DEBUG(dbgs() << "]\n"); 703 return true; 704 705 next_super_reg: 706 DEBUG(dbgs() << ']'); 707 } while (R != EndR); 708 709 DEBUG(dbgs() << '\n'); 710 711 // No registers are free and available! 712 return false; 713} 714 715/// BreakAntiDependencies - Identifiy anti-dependencies within the 716/// ScheduleDAG and break them by renaming registers. 717/// 718unsigned AggressiveAntiDepBreaker::BreakAntiDependencies( 719 const std::vector<SUnit>& SUnits, 720 MachineBasicBlock::iterator Begin, 721 MachineBasicBlock::iterator End, 722 unsigned InsertPosIndex, 723 DbgValueVector &DbgValues) { 724 725 std::vector<unsigned> &KillIndices = State->GetKillIndices(); 726 std::vector<unsigned> &DefIndices = State->GetDefIndices(); 727 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>& 728 RegRefs = State->GetRegRefs(); 729 730 // The code below assumes that there is at least one instruction, 731 // so just duck out immediately if the block is empty. 
732 if (SUnits.empty()) return 0; 733 734 // For each regclass the next register to use for renaming. 735 RenameOrderType RenameOrder; 736 737 // ...need a map from MI to SUnit. 738 std::map<MachineInstr *, const SUnit *> MISUnitMap; 739 for (unsigned i = 0, e = SUnits.size(); i != e; ++i) { 740 const SUnit *SU = &SUnits[i]; 741 MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(), 742 SU)); 743 } 744 745 // Track progress along the critical path through the SUnit graph as 746 // we walk the instructions. This is needed for regclasses that only 747 // break critical-path anti-dependencies. 748 const SUnit *CriticalPathSU = 0; 749 MachineInstr *CriticalPathMI = 0; 750 if (CriticalPathSet.any()) { 751 for (unsigned i = 0, e = SUnits.size(); i != e; ++i) { 752 const SUnit *SU = &SUnits[i]; 753 if (!CriticalPathSU || 754 ((SU->getDepth() + SU->Latency) > 755 (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) { 756 CriticalPathSU = SU; 757 } 758 } 759 760 CriticalPathMI = CriticalPathSU->getInstr(); 761 } 762 763#ifndef NDEBUG 764 DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n"); 765 DEBUG(dbgs() << "Available regs:"); 766 for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) { 767 if (!State->IsLive(Reg)) 768 DEBUG(dbgs() << " " << TRI->getName(Reg)); 769 } 770 DEBUG(dbgs() << '\n'); 771#endif 772 773 // Attempt to break anti-dependence edges. Walk the instructions 774 // from the bottom up, tracking information about liveness as we go 775 // to help determine which registers are available. 776 unsigned Broken = 0; 777 unsigned Count = InsertPosIndex - 1; 778 for (MachineBasicBlock::iterator I = End, E = Begin; 779 I != E; --Count) { 780 MachineInstr *MI = --I; 781 782 DEBUG(dbgs() << "Anti: "); 783 DEBUG(MI->dump()); 784 785 std::set<unsigned> PassthruRegs; 786 GetPassthruRegs(MI, PassthruRegs); 787 788 // Process the defs in MI... 
789 PrescanInstruction(MI, Count, PassthruRegs); 790 791 // The dependence edges that represent anti- and output- 792 // dependencies that are candidates for breaking. 793 std::vector<const SDep *> Edges; 794 const SUnit *PathSU = MISUnitMap[MI]; 795 AntiDepEdges(PathSU, Edges); 796 797 // If MI is not on the critical path, then we don't rename 798 // registers in the CriticalPathSet. 799 BitVector *ExcludeRegs = NULL; 800 if (MI == CriticalPathMI) { 801 CriticalPathSU = CriticalPathStep(CriticalPathSU); 802 CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0; 803 } else { 804 ExcludeRegs = &CriticalPathSet; 805 } 806 807 // Ignore KILL instructions (they form a group in ScanInstruction 808 // but don't cause any anti-dependence breaking themselves) 809 if (!MI->isKill()) { 810 // Attempt to break each anti-dependency... 811 for (unsigned i = 0, e = Edges.size(); i != e; ++i) { 812 const SDep *Edge = Edges[i]; 813 SUnit *NextSU = Edge->getSUnit(); 814 815 if ((Edge->getKind() != SDep::Anti) && 816 (Edge->getKind() != SDep::Output)) continue; 817 818 unsigned AntiDepReg = Edge->getReg(); 819 DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg)); 820 assert(AntiDepReg != 0 && "Anti-dependence on reg0?"); 821 822 if (!AllocatableSet.test(AntiDepReg)) { 823 // Don't break anti-dependencies on non-allocatable registers. 824 DEBUG(dbgs() << " (non-allocatable)\n"); 825 continue; 826 } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) { 827 // Don't break anti-dependencies for critical path registers 828 // if not on the critical path 829 DEBUG(dbgs() << " (not critical-path)\n"); 830 continue; 831 } else if (PassthruRegs.count(AntiDepReg) != 0) { 832 // If the anti-dep register liveness "passes-thru", then 833 // don't try to change it. It will be changed along with 834 // the use if required to break an earlier antidep. 
835 DEBUG(dbgs() << " (passthru)\n"); 836 continue; 837 } else { 838 // No anti-dep breaking for implicit deps 839 MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg); 840 assert(AntiDepOp != NULL && 841 "Can't find index for defined register operand"); 842 if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) { 843 DEBUG(dbgs() << " (implicit)\n"); 844 continue; 845 } 846 847 // If the SUnit has other dependencies on the SUnit that 848 // it anti-depends on, don't bother breaking the 849 // anti-dependency since those edges would prevent such 850 // units from being scheduled past each other 851 // regardless. 852 // 853 // Also, if there are dependencies on other SUnits with the 854 // same register as the anti-dependency, don't attempt to 855 // break it. 856 for (SUnit::const_pred_iterator P = PathSU->Preds.begin(), 857 PE = PathSU->Preds.end(); P != PE; ++P) { 858 if (P->getSUnit() == NextSU ? 859 (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) : 860 (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) { 861 AntiDepReg = 0; 862 break; 863 } 864 } 865 for (SUnit::const_pred_iterator P = PathSU->Preds.begin(), 866 PE = PathSU->Preds.end(); P != PE; ++P) { 867 if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) && 868 (P->getKind() != SDep::Output)) { 869 DEBUG(dbgs() << " (real dependency)\n"); 870 AntiDepReg = 0; 871 break; 872 } else if ((P->getSUnit() != NextSU) && 873 (P->getKind() == SDep::Data) && 874 (P->getReg() == AntiDepReg)) { 875 DEBUG(dbgs() << " (other dependency)\n"); 876 AntiDepReg = 0; 877 break; 878 } 879 } 880 881 if (AntiDepReg == 0) continue; 882 } 883 884 assert(AntiDepReg != 0); 885 if (AntiDepReg == 0) continue; 886 887 // Determine AntiDepReg's register group. 
888 const unsigned GroupIndex = State->GetGroup(AntiDepReg); 889 if (GroupIndex == 0) { 890 DEBUG(dbgs() << " (zero group)\n"); 891 continue; 892 } 893 894 DEBUG(dbgs() << '\n'); 895 896 // Look for a suitable register to use to break the anti-dependence. 897 std::map<unsigned, unsigned> RenameMap; 898 if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) { 899 DEBUG(dbgs() << "\tBreaking anti-dependence edge on " 900 << TRI->getName(AntiDepReg) << ":"); 901 902 // Handle each group register... 903 for (std::map<unsigned, unsigned>::iterator 904 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) { 905 unsigned CurrReg = S->first; 906 unsigned NewReg = S->second; 907 908 DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" << 909 TRI->getName(NewReg) << "(" << 910 RegRefs.count(CurrReg) << " refs)"); 911 912 // Update the references to the old register CurrReg to 913 // refer to the new register NewReg. 914 std::pair<std::multimap<unsigned, 915 AggressiveAntiDepState::RegisterReference>::iterator, 916 std::multimap<unsigned, 917 AggressiveAntiDepState::RegisterReference>::iterator> 918 Range = RegRefs.equal_range(CurrReg); 919 for (std::multimap<unsigned, 920 AggressiveAntiDepState::RegisterReference>::iterator 921 Q = Range.first, QE = Range.second; Q != QE; ++Q) { 922 Q->second.Operand->setReg(NewReg); 923 // If the SU for the instruction being updated has debug 924 // information related to the anti-dependency register, make 925 // sure to update that as well. 926 const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()]; 927 if (!SU) continue; 928 for (DbgValueVector::iterator DVI = DbgValues.begin(), 929 DVE = DbgValues.end(); DVI != DVE; ++DVI) 930 if (DVI->second == Q->second.Operand->getParent()) 931 UpdateDbgValue(DVI->first, AntiDepReg, NewReg); 932 } 933 934 // We just went back in time and modified history; the 935 // liveness information for CurrReg is now inconsistent. Set 936 // the state as if it were dead. 
937 State->UnionGroups(NewReg, 0); 938 RegRefs.erase(NewReg); 939 DefIndices[NewReg] = DefIndices[CurrReg]; 940 KillIndices[NewReg] = KillIndices[CurrReg]; 941 942 State->UnionGroups(CurrReg, 0); 943 RegRefs.erase(CurrReg); 944 DefIndices[CurrReg] = KillIndices[CurrReg]; 945 KillIndices[CurrReg] = ~0u; 946 assert(((KillIndices[CurrReg] == ~0u) != 947 (DefIndices[CurrReg] == ~0u)) && 948 "Kill and Def maps aren't consistent for AntiDepReg!"); 949 } 950 951 ++Broken; 952 DEBUG(dbgs() << '\n'); 953 } 954 } 955 } 956 957 ScanInstruction(MI, Count); 958 } 959 960 return Broken; 961} 962