MemoryDependenceAnalysis.cpp revision 39f372e23e49cecb8db2eb7120eb331173e50c74
1//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements an analysis that determines, for a given memory 11// operation, what preceding memory operations it depends on. It builds on 12// alias analysis information, and tries to provide a lazy, caching interface to 13// a common kind of alias information query. 14// 15//===----------------------------------------------------------------------===// 16 17#define DEBUG_TYPE "memdep" 18#include "llvm/Analysis/MemoryDependenceAnalysis.h" 19#include "llvm/Constants.h" 20#include "llvm/Instructions.h" 21#include "llvm/Function.h" 22#include "llvm/Analysis/AliasAnalysis.h" 23#include "llvm/ADT/Statistic.h" 24#include "llvm/ADT/STLExtras.h" 25#include "llvm/Support/CFG.h" 26#include "llvm/Support/CommandLine.h" 27#include "llvm/Support/Debug.h" 28#include "llvm/Target/TargetData.h" 29 30using namespace llvm; 31 32// Control the calculation of non-local dependencies by only examining the 33// predecessors if the basic block has less than X amount (50 by default). 34static cl::opt<int> 35PredLimit("nonlocaldep-threshold", cl::Hidden, cl::init(50), 36 cl::desc("Control the calculation of non-local" 37 "dependencies (default = 50)")); 38 39STATISTIC(NumCacheNonlocal, "Number of cached non-local responses"); 40STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses"); 41 42char MemoryDependenceAnalysis::ID = 0; 43 44// Register this pass... 45static RegisterPass<MemoryDependenceAnalysis> X("memdep", 46 "Memory Dependence Analysis", false, true); 47 48/// verifyRemoved - Verify that the specified instruction does not occur 49/// in our internal data structures. 
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  // D must not appear in the local-dependency cache, neither as a query
  // (key) nor as a cached result (value).
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.first.getPointer() != D &&
           "Inst occurs in data structures");
  }

  // D must not appear in the non-local dependency graph, neither as a query
  // nor as any query's cached per-block result.
  for (nonLocalDepMapType::const_iterator I = depGraphNonLocal.begin(),
       E = depGraphNonLocal.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (DenseMap<BasicBlock*, DepResultTy>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(II->second.getPointer() != D && "Inst occurs in data structures");
  }

  // No reverse-dependency set may still record D as an instruction that
  // depends on something.
  for (reverseDepMapType::const_iterator I = reverseDep.begin(),
       E = reverseDep.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");

  // Same check for the non-local reverse map.
  for (reverseDepMapType::const_iterator I = reverseDepNonLocal.begin(),
       E = reverseDepNonLocal.end();
       I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
90MemoryDependenceAnalysis::DepResultTy 91MemoryDependenceAnalysis:: 92getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) { 93 std::pair<DepResultTy, bool> &cachedResult = LocalDeps[C.getInstruction()]; 94 AliasAnalysis& AA = getAnalysis<AliasAnalysis>(); 95 TargetData& TD = getAnalysis<TargetData>(); 96 BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin(); 97 BasicBlock::iterator QI = C.getInstruction(); 98 99 // If the starting point was specified, use it 100 if (start) { 101 QI = start; 102 blockBegin = start->getParent()->begin(); 103 // If the starting point wasn't specified, but the block was, use it 104 } else if (!start && block) { 105 QI = block->end(); 106 blockBegin = block->begin(); 107 } 108 109 // Walk backwards through the block, looking for dependencies 110 while (QI != blockBegin) { 111 --QI; 112 113 // If this inst is a memory op, get the pointer it accessed 114 Value* pointer = 0; 115 uint64_t pointerSize = 0; 116 if (StoreInst* S = dyn_cast<StoreInst>(QI)) { 117 pointer = S->getPointerOperand(); 118 pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType()); 119 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) { 120 pointer = AI; 121 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize())) 122 pointerSize = C->getZExtValue() * 123 TD.getABITypeSize(AI->getAllocatedType()); 124 else 125 pointerSize = ~0UL; 126 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) { 127 pointer = V->getOperand(0); 128 pointerSize = TD.getTypeStoreSize(V->getType()); 129 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) { 130 pointer = F->getPointerOperand(); 131 132 // FreeInsts erase the entire structure 133 pointerSize = ~0UL; 134 } else if (CallSite::get(QI).getInstruction() != 0) { 135 AliasAnalysis::ModRefBehavior result = 136 AA.getModRefBehavior(CallSite::get(QI)); 137 if (result != AliasAnalysis::DoesNotAccessMemory) { 138 if (!start && !block) { 139 cachedResult.first = DepResultTy(QI, 
Normal); 140 cachedResult.second = true; 141 reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction()); 142 } 143 return DepResultTy(QI, Normal); 144 } else { 145 continue; 146 } 147 } else 148 continue; 149 150 if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) { 151 if (!start && !block) { 152 cachedResult.first = DepResultTy(QI, Normal); 153 cachedResult.second = true; 154 reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction()); 155 } 156 return DepResultTy(QI, Normal); 157 } 158 } 159 160 // No dependence found 161 cachedResult.first = DepResultTy(0, NonLocal); 162 cachedResult.second = true; 163 reverseDep[DepResultTy(0, NonLocal)].insert(C.getInstruction()); 164 return DepResultTy(0, NonLocal); 165} 166 167/// nonLocalHelper - Private helper used to calculate non-local dependencies 168/// by doing DFS on the predecessors of a block to find its dependencies. 169void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query, 170 BasicBlock* block, 171 DenseMap<BasicBlock*, DepResultTy> &resp) { 172 // Set of blocks that we've already visited in our DFS 173 SmallPtrSet<BasicBlock*, 4> visited; 174 // If we're updating a dirtied cache entry, we don't need to reprocess 175 // already computed entries. 176 for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(), 177 E = resp.end(); I != E; ++I) 178 if (I->second.getInt() != Dirty) 179 visited.insert(I->first); 180 181 // Current stack of the DFS 182 SmallVector<BasicBlock*, 4> stack; 183 for (pred_iterator PI = pred_begin(block), PE = pred_end(block); 184 PI != PE; ++PI) 185 stack.push_back(*PI); 186 187 // Do a basic DFS 188 while (!stack.empty()) { 189 BasicBlock* BB = stack.back(); 190 191 // If we've already visited this block, no need to revist 192 if (visited.count(BB)) { 193 stack.pop_back(); 194 continue; 195 } 196 197 // If we find a new block with a local dependency for query, 198 // then we insert the new dependency and backtrack. 
199 if (BB != block) { 200 visited.insert(BB); 201 202 DepResultTy localDep = getDependency(query, 0, BB); 203 if (localDep.getInt() != NonLocal) { 204 resp.insert(std::make_pair(BB, localDep)); 205 stack.pop_back(); 206 207 continue; 208 } 209 // If we re-encounter the starting block, we still need to search it 210 // because there might be a dependency in the starting block AFTER 211 // the position of the query. This is necessary to get loops right. 212 } else if (BB == block) { 213 visited.insert(BB); 214 215 DepResultTy localDep = getDependency(query, 0, BB); 216 if (localDep != DepResultTy(query, Normal)) 217 resp.insert(std::make_pair(BB, localDep)); 218 219 stack.pop_back(); 220 221 continue; 222 } 223 224 // If we didn't find anything, recurse on the precessors of this block 225 // Only do this for blocks with a small number of predecessors. 226 bool predOnStack = false; 227 bool inserted = false; 228 if (std::distance(pred_begin(BB), pred_end(BB)) <= PredLimit) { 229 for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB); 230 PI != PE; ++PI) 231 if (!visited.count(*PI)) { 232 stack.push_back(*PI); 233 inserted = true; 234 } else 235 predOnStack = true; 236 } 237 238 // If we inserted a new predecessor, then we'll come back to this block 239 if (inserted) 240 continue; 241 // If we didn't insert because we have no predecessors, then this 242 // query has no dependency at all. 243 else if (!inserted && !predOnStack) { 244 resp.insert(std::make_pair(BB, DepResultTy(0, None))); 245 // If we didn't insert because our predecessors are already on the stack, 246 // then we might still have a dependency, but it will be discovered during 247 // backtracking. 248 } else if (!inserted && predOnStack){ 249 resp.insert(std::make_pair(BB, DepResultTy(0, NonLocal))); 250 } 251 252 stack.pop_back(); 253 } 254} 255 256/// getNonLocalDependency - Fills the passed-in map with the non-local 257/// dependencies of the queries. 
/// The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
                                  DenseMap<BasicBlock*, DepResultTy> &resp) {
  if (depGraphNonLocal.count(query)) {
    // Cache hit: reuse the cached per-block results, recomputing only the
    // entries marked Dirty by removeInstruction/dropInstruction.
    DenseMap<BasicBlock*, DepResultTy> &cached = depGraphNonLocal[query];
    NumCacheNonlocal++;

    // Collect the dirtied blocks first so we don't mutate 'cached' while
    // iterating over it.
    SmallVector<BasicBlock*, 4> dirtied;
    for (DenseMap<BasicBlock*, DepResultTy>::iterator I = cached.begin(),
         E = cached.end(); I != E; ++I)
      if (I->second.getInt() == Dirty)
        dirtied.push_back(I->first);

    for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
         E = dirtied.end(); I != E; ++I) {
      // Try to recompute a local dependency within the dirtied block; if it
      // is still non-local, drop the entry and redo the DFS from that block.
      DepResultTy localDep = getDependency(query, 0, *I);
      if (localDep.getInt() != NonLocal)
        cached[*I] = localDep;
      else {
        cached.erase(*I);
        nonLocalHelper(query, *I, cached);
      }
    }

    // Hand the (now clean) cached map back to the caller by copy.
    resp = cached;

    // Update the reverse non-local dependency cache
    for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
         E = resp.end(); I != E; ++I)
      reverseDepNonLocal[I->second].insert(query);

    return;
  } else
    NumUncacheNonlocal++;

  // If not, go ahead and search for non-local deps.
  nonLocalHelper(query, query->getParent(), resp);

  // Update the non-local dependency cache
  for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
       E = resp.end(); I != E; ++I) {
    depGraphNonLocal[query].insert(*I);
    reverseDepNonLocal[I->second].insert(query);
  }
}

/// getDependency - Return the instruction on which a memory operation
/// depends.  If 'start' is non-null the search begins there; otherwise, if
/// 'block' is non-null, only that block is searched.  Results are cached
/// only for the unconstrained form of the query (both null).
MemoryDependenceAnalysis::DepResultTy
MemoryDependenceAnalysis::getDependency(Instruction *query,
                                        Instruction *start,
                                        BasicBlock *block) {
  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;

  // Check for a cached result
  std::pair<DepResultTy, bool>& cachedResult = LocalDeps[query];
  // If we have a _confirmed_ cached entry, return it
  if (!block && !start) {
    if (cachedResult.second)
      return cachedResult.first;
    else if (cachedResult.first.getInt() == Normal &&
             cachedResult.first.getPointer())
      // If we have an unconfirmed cached entry, we can start our search from
      // it.
      QI = cachedResult.first.getPointer();
  }

  // A constrained query overrides any cached starting point.
  if (start)
    QI = start;
  else if (!start && block)
    QI = block->end();

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value* dependee = 0;
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
    dependeeSize = ~0UL;
  } else if (CallSite::get(query).getInstruction() != 0)
    // Calls are handled by a dedicated helper.
    return getCallSiteDependency(CallSite::get(query), start, block);
  else if (isa<AllocationInst>(query))
    // Allocations depend on nothing.
    return DepResultTy(0, None);
  else
    // Not a memory operation at all.
    return DepResultTy(0, None);

  BasicBlock::iterator blockBegin = block ? block->begin()
                                          : query->getParent()->begin();

  // Walk backwards through the basic block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = DepResultTy(S, Normal);
          cachedResult.second = true;
          reverseDep[DepResultTy(S, Normal)].insert(query);
        }

        return DepResultTy(S, Normal);
      }

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = DepResultTy(L, Normal);
          cachedResult.second = true;
          reverseDep[DepResultTy(L, Normal)].insert(query);
        }

        return DepResultTy(L, Normal);
      }

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        // Variable-sized allocation: conservatively unbounded.
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling. Check if they can modify our pointer
      AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
                                                        dependee, dependeeSize);

      if (MR != AliasAnalysis::NoModRef) {
        // Loads don't depend on read-only calls
        if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
          continue;

        if (!start && !block) {
          cachedResult.first = DepResultTy(QI, Normal);
          cachedResult.second = true;
          reverseDep[DepResultTy(QI, Normal)].insert(query);
        }
        return DepResultTy(QI, Normal);
      } else {
        continue;
      }
    }
    // Note: non-memory instructions fall through with pointer == 0 and are
    // skipped by the check below.

    // If we found a pointer, check if it could be the same as our pointer
    if (pointer) {
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        // May-alias loads don't depend on each other
        if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
            R == AliasAnalysis::MayAlias)
          continue;

        if (!start && !block) {
          cachedResult.first = DepResultTy(QI, Normal);
          cachedResult.second = true;
          reverseDep[DepResultTy(QI, Normal)].insert(query);
        }

        return DepResultTy(QI, Normal);
      }
    }
  }

  // If we found nothing, return the non-local flag
  if (!start && !block) {
    cachedResult.first = DepResultTy(0, NonLocal);
    cachedResult.second = true;
    reverseDep[DepResultTy(0, NonLocal)].insert(query);
  }

  return DepResultTy(0, NonLocal);
}

/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache.  This is
/// useful, for example when an instruction is changed rather than removed.
void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
  // If 'drop' has a cached local dependency, unlink 'drop' from that
  // dependency's reverse set.
  LocalDepMapType::iterator depGraphEntry = LocalDeps.find(drop);
  if (depGraphEntry != LocalDeps.end())
    reverseDep[depGraphEntry->second.first].erase(drop);

  // Drop dependency information for things that depended on this instr
  SmallPtrSet<Instruction*, 4>& set = reverseDep[DepResultTy(drop, Normal)];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I)
    LocalDeps.erase(*I);

  LocalDeps.erase(drop);
  reverseDep.erase(DepResultTy(drop, Normal));

  // Unlink 'drop' from the reverse sets of every non-local result cached
  // for it.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
       depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
       DI != DE; ++DI)
    if (DI->second.getInt() != None)
      reverseDepNonLocal[DI->second].erase(drop);

  // Conservatively mark Dirty every cached non-local entry that named 'drop'
  // as its result, so it is recomputed on the next query.
  if (reverseDepNonLocal.count(DepResultTy(drop, Normal))) {
    SmallPtrSet<Instruction*, 4>& set =
      reverseDepNonLocal[DepResultTy(drop, Normal)];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == DepResultTy(drop, Normal))
          DI->second = DepResultTy(0, Dirty);
  }

  reverseDepNonLocal.erase(DepResultTy(drop, Normal));
  depGraphNonLocal.erase(drop);
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
       depGraphNonLocal[RemInst].begin(), DE = depGraphNonLocal[RemInst].end();
       DI != DE; ++DI)
    if (DI->second.getInt() != None)
      reverseDepNonLocal[DI->second].erase(RemInst);

  // Shortly after this, we will look for things that depend on RemInst.  In
  // order to update these, we'll need a new dependency to base them on.  We
  // could completely delete any entries that depend on this, but it is better
  // to make a more accurate approximation where possible.  Compute that better
  // approximation if we can.
  DepResultTy NewDependency;
  bool NewDependencyConfirmed = false;

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    DepResultTy LocalDep = LocalDepEntry->second.first;
    bool IsConfirmed = LocalDepEntry->second.second;

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);

    // Remove us from DepInst's reverse set now that the local dep info is gone.
    reverseDep[LocalDep].erase(RemInst);

    // If we have unconfirmed info, don't trust it.
    if (IsConfirmed) {
      // If we have a confirmed non-local flag, use it.
      if (LocalDep.getInt() == NonLocal || LocalDep.getInt() == None) {
        // The only time this dependency is confirmed is if it is non-local.
        NewDependency = LocalDep;
        NewDependencyConfirmed = true;
      } else {
        // If we have dep info for RemInst, set them to it.
        Instruction *NDI = next(BasicBlock::iterator(LocalDep.getPointer()));
        if (NDI != RemInst) // Don't use RemInst for the new dependency!
          NewDependency = DepResultTy(NDI, Normal);
      }
    }
  }

  // If we don't already have a local dependency answer for this instruction,
  // use the immediate successor of RemInst.  We use the successor because
  // getDependence starts by checking the immediate predecessor of what is in
  // the cache.
  if (NewDependency == DepResultTy(0, Normal))
    NewDependency = DepResultTy(next(BasicBlock::iterator(RemInst)), Normal);

  // Loop over all of the things that depend on the instruction we're removing.
  //
  reverseDepMapType::iterator ReverseDepIt =
    reverseDep.find(DepResultTy(RemInst, Normal));
  if (ReverseDepIt != reverseDep.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;

      // If we thought the instruction depended on itself (possible for
      // unconfirmed dependencies) ignore the update.
      if (InstDependingOnRemInst == RemInst) continue;

      // Insert the new dependencies.
      LocalDeps[InstDependingOnRemInst] =
        std::make_pair(NewDependency, NewDependencyConfirmed);

      // If our NewDependency is an instruction, make sure to remember that new
      // things depend on it.
      // FIXME: Just insert all deps!
      if (NewDependency.getInt() != NonLocal && NewDependency.getInt() != None)
        reverseDep[NewDependency].insert(InstDependingOnRemInst);
    }
    reverseDep.erase(DepResultTy(RemInst, Normal));
  }

  // Dirty every cached non-local entry that named RemInst as its result, so
  // the next getNonLocalDependency recomputes it.
  ReverseDepIt = reverseDepNonLocal.find(DepResultTy(RemInst, Normal));
  if (ReverseDepIt != reverseDepNonLocal.end()) {
    SmallPtrSet<Instruction*, 4>& set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == DepResultTy(RemInst, Normal))
          DI->second = DepResultTy(0, Dirty);
    reverseDepNonLocal.erase(ReverseDepIt);
  }

  depGraphNonLocal.erase(RemInst);

  // Tell alias analysis the value is gone, then (in asserts builds) verify
  // no internal structure still references it.
  getAnalysis<AliasAnalysis>().deleteValue(RemInst);

  DEBUG(verifyRemoved(RemInst));
}