MemoryDependenceAnalysis.cpp revision 7b929dad59785f62a66f7c58615082f98441e95e
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MallocHelper.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
: FunctionPass(&ID), PredCache(0) {
}
MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// Clean up memory in between runs
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}
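
// Illustrative sketch (an assumption for exposition, not code from this file):
// a hypothetical client pass ("MyClientPass") would typically pick up this
// analysis through the pass manager like so:
//
//   void MyClientPass::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<MemoryDependenceAnalysis>();
//     AU.addRequired<AliasAnalysis>();
//   }
//   ...
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();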

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
  InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;  // Silence 'unused variable' warnings in no-assert builds.
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
                          BasicBlock::iterator ScanIt, BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = AA->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = AA->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      // Debug intrinsics don't cause dependences.
      if (isa<DbgInfoIntrinsic>(Inst)) continue;
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past this one.
      switch (AA->getModRefInfo(CS, InstCS)) {
      case AliasAnalysis::NoModRef:
        // If the two calls don't interact (e.g. InstCS is readnone) keep
        // scanning.
        continue;
      case AliasAnalysis::Ref:
        // If the two calls read the same memory locations and CS is a readonly
        // function, then we have two cases: 1) the calls may not interfere with
        // each other at all.  2) the calls may produce the same value.  In case
        // #1 we want to ignore the values, in case #2, we want to return Inst
        // as a Def dependence.  This allows us to CSE in cases like:
        //   X = strlen(P);
        //    memchr(...);
        //   Y = strlen(P);  // Y = X
        if (isReadOnlyCall) {
          if (CS.getCalledFunction() != 0 &&
              CS.getCalledFunction() == InstCS.getCalledFunction())
            return MemDepResult::getDef(Inst);
          // Ignore unrelated read/read call dependences.
          continue;
        }
        // FALL THROUGH
      default:
        return MemDepResult::getClobber(Inst);
      }
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Debug intrinsics don't cause dependences.
    if (isa<DbgInfoIntrinsic>(Inst)) continue;

    // Values depend on loads if the pointers are must aliased.  This means that
    // a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = AA->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // For load queries, a load that only may-aliases the query pointer is
      // not a dependence.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;
      // Stores depend on may- and must-aliased loads; loads depend on
      // must-aliased loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // If alias analysis can tell that this store is guaranteed to not modify
      // the query pointer, ignore it.  Use getModRefInfo to handle cases where
      // the query pointer points to constant memory etc.
      if (AA->getModRefInfo(SI, MemPtr, MemSize) == AliasAnalysis::NoModRef)
        continue;

      // Ok, this store might clobber the query pointer.  Check to see if it is
      // a must alias: in this case, we want to return this as a def.
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = AA->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    // Note: Only determine this to be a malloc if Inst is the malloc call, not
    // a subsequent bitcast of the malloc call result.  There can be stores to
    // the malloced memory between the malloc call and its bitcast uses, and we
    // need to continue scanning until the malloc call.
    if (isa<AllocaInst>(Inst) || extractMallocCall(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == Inst ||
          AA->alias(Inst, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(Inst);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    switch (AA->getModRefInfo(Inst, MemPtr, MemSize)) {
    case AliasAnalysis::NoModRef:
      // If the call has no effect on the queried pointer, just ignore it.
      continue;
    case AliasAnalysis::Ref:
      // If the call is known to never store to the pointer, and if this is a
      // load query, we can safely ignore it (scan past it).
      if (isLoad)
        continue;
      // FALL THROUGH.
    default:
      // Otherwise, there is a potential dependence.  Return a clobber.
      return MemDepResult::getClobber(Inst);
    }
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, it is a
    // clobber, otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getClobber(QueryInst);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = SI->getPointerOperand();
      MemSize = AA->getTypeStoreSize(SI->getOperand(0)->getType());
    }
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = LI->getPointerOperand();
      MemSize = AA->getTypeStoreSize(LI->getType());
    }
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    CallSite QueryCS = CallSite::get(QueryInst);
    bool isReadOnly = AA->onlyReadsMemory(QueryCS);
    LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
                                           QueryParent);
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemPtr)
    LocalCache = getPointerDependencyFrom(MemPtr, MemSize,
                                          isa<LoadInst>(QueryInst),
                                          ScanPos, QueryParent);

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
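
// Illustrative sketch (an assumption, not code from this file): a client such
// as a redundancy-elimination pass might consume getDependency() like this,
// where "MD" and "Load" are hypothetical names:
//
//   MemDepResult Dep = MD.getDependency(Load);
//   if (Dep.isDef()) {
//     if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
//       ...  // Load reads exactly what DepSI stored; forwarding may apply.
//   } else if (Dep.isClobber()) {
//     ...  // Dep.getInst() may write the queried memory; stop here.
//   } else if (Dep.isNonLocal()) {
//     ...  // The answer lives in predecessor blocks; use the non-local APIs.
//   }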

#ifndef NDEBUG
/// AssertSorted - This method is used when -debug is specified to verify that
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1) Count = Cache.size();
  if (Count == 0) return;

  for (unsigned i = 1; i != unsigned(Count); ++i)
    assert(Cache[i-1] <= Cache[i] && "Cache isn't sorted!");
}
#endif

/// getNonLocalCallDependency - Perform a full dependency query for the
/// specified call, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "NonLocal" dependency
/// within its own block.
///
/// This returns a reference to an internal data structure that may be
/// invalidated on the next non-local query or when an instruction is
/// removed.  Clients must copy this data if they want it around longer than
/// that.
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
  assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
 "getNonLocalCallDependency should only be used on calls with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc.
  /// In the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
    for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
      DirtyBlocks.push_back(*PI);
    NumUncacheNonLocal++;
  }

  // isReadonlyCall - If this is a read-only call, we can be more aggressive.
  bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);

  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();
  DEBUG(AssertSorted(Cache));

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    DEBUG(AssertSorted(Cache, NumSortedEntries));
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && prior(Entry)->first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
                             QueryCS.getInstruction());
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    if (ScanPos != DirtyBB->begin()) {
      Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall,ScanPos, DirtyBB);
    } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
      // No dependence found.  If this is the entry block of the function, it is
      // a clobber, otherwise it is non-local.
      Dep = MemDepResult::getNonLocal();
    } else {
      Dep = MemDepResult::getClobber(ScanPos);
    }

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
    } else {

      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
        DirtyBlocks.push_back(*PI);
    }
  }

  return Cache;
}
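
// Illustrative sketch (an assumption, not code from this file): after a local
// query on a call instruction "CI" returns NonLocal, a client might walk the
// per-block results like this:
//
//   const MemoryDependenceAnalysis::NonLocalDepInfo &Deps =
//     MD.getNonLocalCallDependency(CallSite::get(CI));
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
//     BasicBlock *BB = Deps[i].first;      // block the result applies to
//     MemDepResult Res = Deps[i].second;   // Def/Clobber/NonLocal in BB
//     ...
//   }
//
// Note the returned reference is into memdep's internal cache: copy it before
// issuing another non-local query or removing an instruction.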

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(Value *Pointer, bool isLoad, BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepEntry> &Result) {
  assert(isa<PointerType>(Pointer->getType()) &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  // We know that the pointer value is live into FromBB; find the def/clobbers
  // from predecessors.
  const Type *EltTy = cast<PointerType>(Pointer->getType())->getElementType();
  uint64_t PointeeSize = AA->getTypeStoreSize(EltTy);

  // This is the set of blocks we've inspected, and the pointer we consider in
  // each block.  Because of critical edges, we currently bail out if querying
  // a block with multiple different pointers.  This can happen during PHI
  // translation.
  DenseMap<BasicBlock*, Value*> Visited;
  if (!getNonLocalPointerDepFromBB(Pointer, PointeeSize, isLoad, FromBB,
                                   Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(std::make_pair(FromBB,
                                  MemDepResult::getClobber(FromBB->begin())));
}
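
// Illustrative sketch (an assumption, not code from this file): a typical
// query for a load "Load" whose local answer was NonLocal:
//
//   SmallVector<MemoryDependenceAnalysis::NonLocalDepEntry, 16> Deps;
//   MD.getNonLocalPointerDependency(Load->getPointerOperand(),
//                                   true /*isLoad*/, Load->getParent(), Deps);
//
// Each entry pairs a block with the Def/Clobber found for the pointer there.
// Per the failure path above, a single clobber entry on FromBB->begin()
// means the query could not be completed (e.g. PHI translation failed).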

/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
/// Pointer/PointeeSize using either cached information in Cache or by doing a
/// lookup (which may use dirty cache info if available).  If we do a lookup,
/// add the result to the cache.
MemDepResult MemoryDependenceAnalysis::
GetNonLocalInfoForBlock(Value *Pointer, uint64_t PointeeSize,
                        bool isLoad, BasicBlock *BB,
                        NonLocalDepInfo *Cache, unsigned NumSortedEntries) {

  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry =
    std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                     std::make_pair(BB, MemDepResult()));
  if (Entry != Cache->begin() && prior(Entry)->first == BB)
    --Entry;

  MemDepResult *ExistingResult = 0;
  if (Entry != Cache->begin()+NumSortedEntries && Entry->first == BB)
    ExistingResult = &Entry->second;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->isDirty()) {
    ++NumCacheNonLocalPtr;
    return *ExistingResult;
  }

  // Otherwise, we have to scan for the value.  If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getInst()) {
    assert(ExistingResult->getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getInst();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Pointer, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Pointer, PointeeSize, isLoad,
                                              ScanPos, BB);

  // If we had a dirty entry for the block, update it.  Otherwise, just add
  // a new entry.
  if (ExistingResult)
    *ExistingResult = Dep;
  else
    Cache->push_back(std::make_pair(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it
  // to Cache!
  if (Dep.isNonLocal())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Pointer, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}

/// SortNonLocalDepInfoCache - Sort a NonLocalDepInfo cache, given a certain
/// number of elements in the array that are already properly ordered.  This
/// is optimized for the case when only a few entries are added.
static void
SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         unsigned NumSortedEntries) {
  switch (Cache.size() - NumSortedEntries) {
  case 0:
    // Done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    MemoryDependenceAnalysis::NonLocalDepEntry Val = Cache.back();
    Cache.pop_back();
    MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.end()-1, Val);
    Cache.insert(Entry, Val);
    // FALL THROUGH.
  }
  case 1:
    // One new entry, just insert the new value at the appropriate position.
    if (Cache.size() != 1) {
      MemoryDependenceAnalysis::NonLocalDepEntry Val = Cache.back();
      Cache.pop_back();
      MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache.begin(), Cache.end(), Val);
      Cache.insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache.begin(), Cache.end());
    break;
  }
}


/// getNonLocalPointerDepFromBB - Perform a dependency query based on
/// pointer/pointeesize starting at the end of StartBB.  Add any clobber/def
/// results to the results vector and keep track of which blocks are visited in
/// 'Visited'.
///
/// This has special behavior for the first block queries (when SkipFirstBlock
/// is true).  In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns false on success, or true to indicate that it could
/// not compute dependence information for some reason.  This should be treated
/// as a clobber dependence on the first instruction in the predecessor block.
bool MemoryDependenceAnalysis::
getNonLocalPointerDepFromBB(Value *Pointer, uint64_t PointeeSize,
                            bool isLoad, BasicBlock *StartBB,
                            SmallVectorImpl<NonLocalDepEntry> &Result,
                            DenseMap<BasicBlock*, Value*> &Visited,
                            bool SkipFirstBlock) {

  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer, isLoad);

  std::pair<BBSkipFirstBlockPair, NonLocalDepInfo> *CacheInfo =
    &NonLocalPointerDeps[CacheKey];
  NonLocalDepInfo *Cache = &CacheInfo->second;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->first == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // If we have a fully cached result for this query, we can just return the
    // cached results and populate the visited set.  However, we have to verify
    // that we don't already have conflicting results for these blocks.  Check
    // to ensure that if a block in the results set is in the visited set that
    // it was for the same pointer query.
    if (!Visited.empty()) {
      for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
           I != E; ++I) {
        DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->first);
        if (VI == Visited.end() || VI->second == Pointer) continue;

        // We have a pointer mismatch in a block.  Just return clobber, saying
        // that something was clobbered in this result.  We could also do a
        // non-fully cached query, but there is little point in doing this.
        return true;
      }
    }

    for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
         I != E; ++I) {
      Visited.insert(std::make_pair(I->first, Pointer));
      if (!I->second.isNonLocal())
        Result.push_back(*I);
    }
    ++NumCacheCompleteNonLocalPtr;
    return false;
  }

  // Otherwise, either this is a new block, a block with an invalid cache
  // pointer or one that we're about to invalidate by putting more info into it
  // than its valid cache info.  If empty, the result will be valid cache info,
  // otherwise it isn't.
  if (Cache->empty())
    CacheInfo->first = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else
    CacheInfo->first = BBSkipFirstBlockPair();

  SmallVector<BasicBlock*, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted.  Previously cached
  // entries will all be sorted.  The entries we add we only sort on demand (we
  // don't insert every element into its sorted position).  We know that we
  // won't get any reuse from currently inserted values, because we don't
  // revisit blocks after we insert info for them.
  unsigned NumSortedEntries = Cache->size();
  DEBUG(AssertSorted(*Cache));

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in FromBB.  See if we have already
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB.  If we have cached
      // information, we will use it, otherwise we compute it.
      DEBUG(AssertSorted(*Cache, NumSortedEntries));
      MemDepResult Dep = GetNonLocalInfoForBlock(Pointer, PointeeSize, isLoad,
                                                 BB, Cache, NumSortedEntries);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        Result.push_back(NonLocalDepEntry(BB, Dep));
        continue;
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to do
    // phi translation to change it into a value live in the predecessor block.
    // If phi translation fails, then we can't continue dependence analysis.
    Instruction *PtrInst = dyn_cast<Instruction>(Pointer);
    bool NeedsPHITranslation = PtrInst && PtrInst->getParent() == BB;

    // If no PHI translation is needed, just add all the predecessors of this
    // block to scan them as well.
    if (!NeedsPHITranslation) {
      SkipFirstBlock = false;
      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(*PI, Pointer));
        if (InsertRes.second) {
          // First time we've looked at *PI.
          Worklist.push_back(*PI);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer, then we have a phi translation failure and we have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer)
          goto PredTranslationFailure;
      }
      continue;
    }

    // If we do need to do phi translation, then there are a bunch of different
    // cases, because we have to find a Value* live in the predecessor block.
    // We know that PtrInst is defined in this block at least.

    // We may have added values to the cache list before this PHI translation.
    // If so, we haven't done anything to ensure that the cache remains sorted.
    // Sort it now (if needed) so that recursive invocations of
    // getNonLocalPointerDepFromBB and other routines that could reuse the
    // cache value will only see properly sorted cache arrays.
    if (Cache && NumSortedEntries != Cache->size()) {
      SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      NumSortedEntries = Cache->size();
    }

    // If this is directly a PHI node, just use the incoming values for each
    // pred as the phi translated version.
    if (PHINode *PtrPHI = dyn_cast<PHINode>(PtrInst)) {
      Cache = 0;

      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        BasicBlock *Pred = *PI;
        Value *PredPtr = PtrPHI->getIncomingValueForBlock(Pred);

        // Check to see if we have already visited this pred block with another
        // pointer.  If so, we can't do this lookup.  This failure can occur
        // with PHI translation when a critical edge exists and the PHI node in
        // the successor translates to a pointer value different than the
        // pointer the block was first analyzed with.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(Pred, PredPtr));

        if (!InsertRes.second) {
          // If the predecessor was visited with PredPtr, then we already did
          // the analysis and can ignore it.
          if (InsertRes.first->second == PredPtr)
            continue;

          // Otherwise, the block was previously analyzed with a different
          // pointer.  We can't represent the result of this case, so we just
          // treat this as a phi translation failure.
          goto PredTranslationFailure;
        }

        // FIXME: it is entirely possible that PHI translating will end up with
        // the same value.  Consider PHI translating something like:
        // X = phi [x, bb1], [y, bb2].  PHI translating for bb1 doesn't *need*
        // to recurse here, pedantically speaking.

        // If we have a problem phi translating, fall through to the code below
        // to handle the failure condition.
        if (getNonLocalPointerDepFromBB(PredPtr, PointeeSize, isLoad, Pred,
                                        Result, Visited))
          goto PredTranslationFailure;
      }

      // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();

      // Since we did phi translation, the "Cache" set won't contain all of the
      // results for the query.  This is ok (we can still use it to accelerate
      // specific block queries) but we can't do the fastpath "return all
      // results from the set".  Clear out the indicator for this.
      CacheInfo->first = BBSkipFirstBlockPair();
      SkipFirstBlock = false;
      continue;
    }

    // TODO: BITCAST, GEP.

    //   cerr << "MEMDEP: Could not PHI translate: " << *Pointer;
    //   if (isa<BitCastInst>(PtrInst) || isa<GetElementPtrInst>(PtrInst))
    //     cerr << "OP:\t\t\t\t" << *PtrInst->getOperand(0);
  PredTranslationFailure:

    if (Cache == 0) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();
    }

    // Since we did phi translation, the "Cache" set won't contain all of the
    // results for the query.
    // This is ok (we can still use it to accelerate specific block queries)
    // but we can't do the fastpath "return all results from the set".  Clear
    // out the indicator for this.
    CacheInfo->first = BBSkipFirstBlockPair();

    // If *nothing* works, mark the pointer as being clobbered by the first
    // instruction in this block.
    //
    // If this is the magic first block, return this as a clobber of the whole
    // incoming value.  Since we can't phi translate to one of the
    // predecessors, we have to bail out.
    if (SkipFirstBlock)
      return true;

    for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
      assert(I != Cache->rend() && "Didn't find current block??");
      if (I->first != BB)
        continue;

      assert(I->second.isNonLocal() &&
             "Should only be here with transparent block");
      I->second = MemDepResult::getClobber(BB->begin());
      ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey);
      Result.push_back(*I);
      break;
    }
  }

  // Okay, we're done now.  If we added new values to the cache, re-sort it.
  SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
  DEBUG(AssertSorted(*Cache));
  return false;
}

/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
void MemoryDependenceAnalysis::
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It =
    NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end()) return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.second;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].second.getInst();
    if (Target == 0) continue;  // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].first);

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}


/// invalidateCachedPointerInfo - This method is used to invalidate cached
/// information about the specified pointer, because it may be too
/// conservative in memdep.  This is an optional call that can be used when
/// the client detects an equivalence between the pointer and some other
/// value and replaces the other value with ptr.  This can make Ptr available
/// in more places than the cached info would otherwise indicate.
void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
  // If Ptr isn't really a pointer, just ignore it.
  if (!isa<PointerType>(Ptr->getType())) return;
  // Flush store info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
  // Flush load info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
}
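
// Illustrative sketch (an assumption, not code from this file): a client that
// proves two pointers equal and rewrites one to the other would flush the now
// stale cache entries; "Other" and "Ptr" are hypothetical names:
//
//   if (/* proved Other == Ptr */) {
//     Other->replaceAllUsesWith(Ptr);
//     MD.invalidateCachedPointerInfo(Ptr);  // cached deps may now be stale
//   }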

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  If the instruction has non-pointer type, then it can't be a pointer
  // base.

  // Remove it from both the load info and the store info.  The instruction
  // can't be in either of these maps if it is non-pointer.
  if (isa<PointerType>(RemInst->getType())) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other values,
  // we need to replace its entry with a dirty version of the instruction after
  // it.  If RemInst is a terminator, we use a null dirty value.
  //
  // Using a dirty version of the instruction after RemInst saves having to
  // scan the entire block to get to this point.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDirtyVal's
      // instruction.
      assert(NewDirtyVal.getInst() && "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
    ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallPtrSet<ValueIsLoadPair, 4> &Set = ReversePtrDepIt->second;
    SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;

    for (SmallPtrSet<ValueIsLoadPair, 4>::iterator I = Set.begin(),
         E = Set.end(); I != E; ++I) {
      ValueIsLoadPair P = *I;
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].second;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].first = BBSkipFirstBlockPair();

      // Update any entries for RemInst to use the instruction after it.
      for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
           DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo.  Changing the dirty entry to its
      // subsequent value may invalidate the sortedness.
      std::sort(NLPDI.begin(), NLPDI.end());
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
        .insert(ReversePtrDepsToAdd.back().second);
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}
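
// Illustrative sketch (an assumption, not code from this file): clients must
// inform memdep about an instruction before erasing it from the IR, so the
// caches stay coherent; "DeadInst" is a hypothetical name:
//
//   MD.removeInstruction(DeadInst);  // update caches and reverse maps first
//   DeadInst->eraseFromParent();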

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (CachedNonLocalPointerInfo::const_iterator I = NonLocalPointerDeps.begin(),
       E = NonLocalPointerDeps.end(); I != E; ++I) {
    assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
    const NonLocalDepInfo &Val = I->second.second;
    for (NonLocalDepInfo::const_iterator II = Val.begin(), EE = Val.end();
         II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs as NLPD value");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseNonLocalPtrDepTy::const_iterator
       I = ReverseNonLocalPtrDeps.begin(),
       E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in rev NLPD map");

    for (SmallPtrSet<ValueIsLoadPair, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != ValueIsLoadPair(D, false) &&
             *II != ValueIsLoadPair(D, true) &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}